-rw-r--r--  doc/context/scripts/mkiv/mtx-epub.html | 55
-rw-r--r--  doc/context/scripts/mkiv/mtx-epub.man | 42
-rw-r--r--  doc/context/scripts/mkiv/mtx-epub.xml | 28
-rw-r--r--  fonts/map/pdftex/context/mkiv-base.map | 182
-rw-r--r--  scripts/context/lua/mtx-epub.lua | 637
-rw-r--r--  scripts/context/lua/mtx-update.lua | 2
-rw-r--r--  scripts/context/lua/mtxrun.lua | 20
-rw-r--r--  scripts/context/stubs/mswin/mtxrun.lua | 20
-rwxr-xr-x  scripts/context/stubs/unix/mtxrun | 20
-rw-r--r--  scripts/context/stubs/win64/mtxrun.lua | 20
-rw-r--r--  tex/context/base/back-exp.lua | 781
-rw-r--r--  tex/context/base/back-exp.mkiv | 6
-rw-r--r--  tex/context/base/char-ini.mkiv | 1
-rw-r--r--  tex/context/base/cont-new.mkiv | 2
-rw-r--r--  tex/context/base/context-version.pdf | bin 4385 -> 4386 bytes
-rw-r--r--  tex/context/base/context.mkiv | 3
-rw-r--r--  tex/context/base/data-res.lua | 3
-rw-r--r--  tex/context/base/data-sch.lua | 14
-rw-r--r--  tex/context/base/export-example.css | 24
-rw-r--r--  tex/context/base/font-ctx.lua | 118
-rw-r--r--  tex/context/base/font-ini.mkvi | 17
-rw-r--r--  tex/context/base/font-mis.lua | 2
-rw-r--r--  tex/context/base/font-otf.lua | 61
-rw-r--r--  tex/context/base/grph-inc.lua | 103
-rw-r--r--  tex/context/base/lang-hyp.lua | 663
-rw-r--r--  tex/context/base/lang-hyp.mkiv | 109
-rw-r--r--  tex/context/base/lxml-ini.mkiv | 1
-rw-r--r--  tex/context/base/math-fbk.lua | 42
-rw-r--r--  tex/context/base/math-ini.lua | 46
-rw-r--r--  tex/context/base/math-ini.mkiv | 7
-rw-r--r--  tex/context/base/math-int.mkiv | 2
-rw-r--r--  tex/context/base/math-stc.mkvi | 277
-rw-r--r--  tex/context/base/math-tag.lua | 546
-rw-r--r--  tex/context/base/mult-def.mkiv | 6
-rw-r--r--  tex/context/base/node-ltp.lua | 6
-rw-r--r--  tex/context/base/spac-ver.lua | 27
-rw-r--r--  tex/context/base/spac-ver.mkiv | 3
-rw-r--r--  tex/context/base/status-files.pdf | bin 24768 -> 24748 bytes
-rw-r--r--  tex/context/base/status-lua.pdf | bin 324988 -> 325287 bytes
-rw-r--r--  tex/context/base/status-mkiv.lua | 12
-rw-r--r--  tex/context/base/strc-tag.lua | 10
-rw-r--r--  tex/context/base/strc-tag.mkiv | 4
-rw-r--r--  tex/context/base/typo-mar.lua | 9
-rw-r--r--  tex/context/base/x-math-svg.lua | 162
-rw-r--r--  tex/context/base/x-mathml.mkiv | 266
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-merged.lua | 56
46 files changed, 3425 insertions, 990 deletions
diff --git a/doc/context/scripts/mkiv/mtx-epub.html b/doc/context/scripts/mkiv/mtx-epub.html
new file mode 100644
index 000000000..9b63234be
--- /dev/null
+++ b/doc/context/scripts/mkiv/mtx-epub.html
@@ -0,0 +1,55 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+<!-- compare with lmx framework variant -->
+
+<!--
+ filename : context-base.xml
+ comment : companion to mtx-server-ctx-startup.tex
+ author : Hans Hagen, PRAGMA-ADE, Hasselt NL
+ copyright: PRAGMA ADE / ConTeXt Development Team
+ license : see context related readme files
+-->
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>ConTeXt EPUB Helpers 1.10</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">ConTeXt EPUB Helpers 1.10 </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--make</th><td></td><td>create epub zip file</td></tr>
+ <tr><th>--purge</th><td></td><td>remove obsolete files</td></tr>
+ <tr><th>--rename</th><td></td><td>rename images to sane names</td></tr>
+ <tr><th>--svgmath</th><td></td><td>convert mathml to svg</td></tr>
+ <tr><th>--svgstyle</th><td></td><td>use given tex style for svg generation (overloads style in specification)</td></tr>
+ <tr><th>--all</th><td></td><td>assume: --purge --rename --svgmath (for fast testing)</td></tr>
+ </table>
+<br/>
+<h1>Example</h1>
+<tt>mtxrun --script epub --make mydocument</tt>
+<br/><br/> </div>
+ </div>
+ </body>
+ </html>
diff --git a/doc/context/scripts/mkiv/mtx-epub.man b/doc/context/scripts/mkiv/mtx-epub.man
new file mode 100644
index 000000000..23e5f71d5
--- /dev/null
+++ b/doc/context/scripts/mkiv/mtx-epub.man
@@ -0,0 +1,42 @@
+.TH "mtx-epub" "1" "01-01-2014" "version 1.10" "ConTeXt EPUB Helpers"
+.SH NAME
+.B mtx-epub
+.SH SYNOPSIS
+.B mtxrun --script epub [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B ConTeXt EPUB Helpers
+.SH OPTIONS
+.TP
+.B --make
+create epub zip file
+.TP
+.B --purge
+remove obsolete files
+.TP
+.B --rename
+rename images to sane names
+.TP
+.B --svgmath
+convert mathml to svg
+.TP
+.B --svgstyle
+use given tex style for svg generation (overloads style in specification)
+.TP
+.B --all
+assume: --purge --rename --svgmath (for fast testing)
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/doc/context/scripts/mkiv/mtx-epub.xml b/doc/context/scripts/mkiv/mtx-epub.xml
new file mode 100644
index 000000000..4ef0f85e3
--- /dev/null
+++ b/doc/context/scripts/mkiv/mtx-epub.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-epub</entry>
+ <entry name="detail">ConTeXt EPUB Helpers</entry>
+ <entry name="version">1.10</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="make"><short>create epub zip file</short></flag>
+ <flag name="purge"><short>remove obsolete files</short></flag>
+ <flag name="rename"><short>rename images to sane names</short></flag>
+ <flag name="svgmath"><short>convert mathml to svg</short></flag>
+ <flag name="svgstyle"><short>use given tex style for svg generation (overloads style in specification)</short></flag>
+ <flag name="all"><short>assume: --purge --rename --svgmath (for fast testing)</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+ <examples>
+ <category>
+ <title>Example</title>
+ <subcategory>
+ <example><command>mtxrun --script epub --make mydocument</command></example>
+ </subcategory>
+ </category>
+ </examples>
+</application>
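
The three help files above (html, man page and xml) document the same new flags. Some combined invocations, sketched from those descriptions (mydocument is the placeholder already used in the examples; mystyle.tex is a hypothetical style name for --svgstyle):

    mtxrun --script epub --make mydocument
    mtxrun --script epub --make --purge --rename --svgmath mydocument
    mtxrun --script epub --make --all mydocument
    mtxrun --script epub --make --svgmath --svgstyle=mystyle.tex mydocument

The second and third lines are equivalent, as --all is described as shorthand for --purge --rename --svgmath.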
diff --git a/fonts/map/pdftex/context/mkiv-base.map b/fonts/map/pdftex/context/mkiv-base.map
index 482bf0894..fc2aa643b 100644
--- a/fonts/map/pdftex/context/mkiv-base.map
+++ b/fonts/map/pdftex/context/mkiv-base.map
@@ -135,110 +135,8 @@ eusm5 EUSM5 <eusm5.pfb
eusm7 EUSM7 <eusm7.pfb
eusm10 EUSM10 <eusm10.pfb
-% lm leftovers, used by inclusion
-
-cmb10 CMB10 <lm-rep-cmrm.enc <lmb10.pfb
-cmbx10 CMBX10 <lm-rep-cmrm.enc <lmbx10.pfb
-cmbx12 CMBX12 <lm-rep-cmrm.enc <lmbx12.pfb
-cmbx5 CMBX5 <lm-rep-cmrm.enc <lmbx5.pfb
-cmbx6 CMBX6 <lm-rep-cmrm.enc <lmbx6.pfb
-cmbx7 CMBX7 <lm-rep-cmrm.enc <lmbx7.pfb
-cmbx8 CMBX8 <lm-rep-cmrm.enc <lmbx8.pfb
-cmbx9 CMBX9 <lm-rep-cmrm.enc <lmbx9.pfb
-cmbxsl10 CMBXSL10 <lm-rep-cmrm.enc <lmbxo10.pfb
-cmbxti10 CMBXTI10 <lm-rep-cmit.enc <lmbxi10.pfb
-cmbxti12 CMBXTI12 <lm-rep-cmit.enc <lmbxti10.pfb
-cmbxti7 CMBXTI7 <lm-rep-cmit.enc <lmbxti10.pfb
-cmcsc10 CMCSC10 <lm-rep-cmsc.enc <lmcsc10.pfb
-cmcsc8 CMCSC8 <lm-rep-cmsc.enc <lmcsc10.pfb
-cmcsc9 CMCSC9 <lm-rep-cmsc.enc <lmcsc10.pfb
-cminch CMINCH <lm-rep-cmin.enc <lmssbx10.pfb
-cmitt10 CMITT10 <lm-rep-cmitt.enc <lmtti10.pfb
-cmitt12 CMITT12 <lm-rep-cmitt.enc <lmtti10.pfb
-cmitt9 CMITT9 <lm-rep-cmitt.enc <lmtti10.pfb
-cmr10 CMR10 <lm-rep-cmrm.enc <lmr10.pfb
-cmr12 CMR12 <lm-rep-cmrm.enc <lmr12.pfb
-cmr17 CMR17 <lm-rep-cmrm.enc <lmr17.pfb
-cmr5 CMR5 <lm-rep-cmsc.enc <lmr5.pfb
-cmr6 CMR6 <lm-rep-cmrm.enc <lmr6.pfb
-cmr7 CMR7 <lm-rep-cmrm.enc <lmr7.pfb
-cmr8 CMR8 <lm-rep-cmrm.enc <lmr8.pfb
-cmr9 CMR9 <lm-rep-cmrm.enc <lmr9.pfb
-cmsl10 CMSL10 <lm-rep-cmrm.enc <lmro10.pfb
-cmsl12 CMSL12 <lm-rep-cmrm.enc <lmro12.pfb
-cmsl6 CMSL6 <lm-rep-cmrm.enc <lmro8.pfb
-cmsl8 CMSL8 <lm-rep-cmrm.enc <lmro8.pfb
-cmsl9 CMSL9 <lm-rep-cmrm.enc <lmro9.pfb
-cmsltt10 CMSLTT10 <lm-rep-cmtt.enc <lmtto10.pfb
-cmsltt9 CMSLTT9 <lm-rep-cmtt.enc <lmtto10.pfb
-cmss10 CMS10 <lm-rep-cmrm.enc <lmss10.pfb
-cmss12 CMS12 <lm-rep-cmrm.enc <lmss12.pfb
-cmss17 CMS17 <lm-rep-cmrm.enc <lmss17.pfb
-cmss8 CMS8 <lm-rep-cmrm.enc <lmss8.pfb
-cmss9 CMS9 <lm-rep-cmrm.enc <lmss9.pfb
-cmssbx10 CMSSBX10 <lm-rep-cmrm.enc <lmssbx10.pfb
-cmssbxo10 CMSSBXO10 <lm-rep-cmrm.enc <lmssbo10.pfb
-cmssdc10 CMSSDC10 <lm-rep-cmrm.enc <lmssdc10.pfb
-cmssi10 CMSI10 <lm-rep-cmrm.enc <lmsso10.pfb
-cmssi12 CMSI12 <lm-rep-cmrm.enc <lmsso12.pfb
-cmssi17 CMSI17 <lm-rep-cmrm.enc <lmsso17.pfb
-cmssi8 CMSSI8 <lm-rep-cmrm.enc <lmsso8.pfb
-cmssi9 CMSSI9 <lm-rep-cmrm.enc <lmsso9.pfb
-cmssq8 CMSQ8 <lm-rep-cmrm.enc <lmssq8.pfb
-cmssqi8 CMSSQI8 <lm-rep-cmrm.enc <lmssqo8.pfb
-cmtcsc10 CMTCSC10 <lm-rep-cmtt.enc <lmtcsc10.pfb
-cmti10 CMTI10 <lm-rep-cmit.enc <lmri10.pfb
-cmti12 CMTI12 <lm-rep-cmit.enc <lmri12.pfb
-cmti7 CMTI7 <lm-rep-cmit.enc <lmri7.pfb
-cmti8 CMTI8 <lm-rep-cmit.enc <lmri8.pfb
-cmti9 CMTI9 <lm-rep-cmit.enc <lmri9.pfb
-cmtt10 CMTT10 <lm-rep-cmtt.enc <lmtt10.pfb
-cmtt12 CMTT12 <lm-rep-cmtt.enc <lmtt12.pfb
-cmtt8 CMTT8 <lm-rep-cmtt.enc <lmtt8.pfb
-cmtt9 CMTT9 <lm-rep-cmtt.enc <lmtt9.pfb
-cmvtt10 CMVTT10 <lm-rep-cmrm.enc <lmvtt10.pfb
-
-% math
-
-cmex10 LMMathExtension10-Regular "enclmmathex ReEncodeFont" <lm-mathex.enc <lmex10.pfb
-cmex9 LMMathExtension10-Regular "enclmmathex ReEncodeFont" <lm-mathex.enc <lmex10.pfb
-cmex8 LMMathExtension10-Regular "enclmmathex ReEncodeFont" <lm-mathex.enc <lmex10.pfb
-cmex7 LMMathExtension10-Regular "enclmmathex ReEncodeFont" <lm-mathex.enc <lmex10.pfb
-
-cmmi5 LMMathItalic5-Italic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi5.pfb
-cmmi6 LMMathItalic6-Italic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi6.pfb
-cmmi7 LMMathItalic7-Italic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi7.pfb
-cmmi8 LMMathItalic8-Italic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi8.pfb
-cmmi9 LMMathItalic9-Italic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi9.pfb
-cmmi10 LMMathItalic10-Italic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi10.pfb
-cmmi12 LMMathItalic12-Italic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi12.pfb
-
-cmmib5 LMMathItalic5-BoldItalic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmib5.pfb
-cmmib6 LMMathItalic6-BoldItalic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmib6.pfb
-cmmib7 LMMathItalic7-BoldItalic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmib7.pfb
-cmmib8 LMMathItalic8-BoldItalic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmib8.pfb
-cmmib9 LMMathItalic9-BoldItalic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmib9.pfb
-cmmib10 LMMathItalic10-BoldItalic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmib10.pfb
-cmmib12 LMMathItalic12-BoldItalic "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmib12.pfb
-
-cmsy5 LMMathSymbols5-Italic "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmsy5.pfb
-cmsy6 LMMathSymbols6-Italic "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmsy6.pfb
-cmsy7 LMMathSymbols7-Italic "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmsy7.pfb
-cmsy8 LMMathSymbols8-Italic "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmsy8.pfb
-cmsy9 LMMathSymbols9-Italic "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmsy9.pfb
-cmsy10 LMMathSymbols10-Italic "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmsy10.pfb
-
-cmbsy5 LMMathSymbols5-BoldItalic "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmbsy5.pfb
-cmbsy6 LMMathSymbols6-BoldItalic "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmbsy6.pfb
-cmbsy7 LMMathSymbols7-BoldItalic "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmbsy7.pfb
-cmbsy8 LMMathSymbols8-BoldItalic "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmbsy8.pfb
-cmbsy9 LMMathSymbols9-BoldItalic "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmbsy9.pfb
-cmbsy10 LMMathSymbols10-BoldItalic "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmbsy10.pfb
-
% original
-cmdunh10 CMDUNH10 <cmdunh10.pfb
-
cmtex10 CMTEX10 <cmtex10.pfb
cmtex8 CMTEX8 <cmtex8.pfb
cmtex9 CMTEX9 <cmtex9.pfb
@@ -247,6 +145,80 @@ cmff10 CMFF10 <cmff10.pfb
cmfi10 CMFI10 <cmfi10.pfb
cmfib8 CMFIB8 <cmfib8.pfb
-% a weird one, not used in context
-
-cmu10 LMRoman10-Italic "-0.25 SlantFont" <lm-rep-cmit.enc <lmri10.pfb
+% the latest gust list
+
+cmb10 LMRomanDemi10-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmb10.pfb
+cmbx10 LMRoman10-Bold "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmbx10.pfb
+cmbx5 LMRoman5-Bold "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmbx5.pfb
+cmbx6 LMRoman6-Bold "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmbx6.pfb
+cmbx7 LMRoman7-Bold "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmbx7.pfb
+cmbx8 LMRoman8-Bold "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmbx8.pfb
+cmbx9 LMRoman9-Bold "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmbx9.pfb
+cmbxsl10 LMRomanSlant10-Bold "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmbxo10.pfb
+cmbxti10 LMRoman10-BoldItalic "enclmrepcmit ReEncodeFont" <lm-rep-cmit.enc <lmbxi10.pfb
+cmcsc10 LMRomanCaps10-Regular "enclmrepcmsc ReEncodeFont" <lm-rep-cmsc.enc <lmcsc10.pfb
+cmcscsl10 LMRomanCaps10-Oblique "enclmrepcmsc ReEncodeFont" <lm-rep-cmsc.enc <lmcsco10.pfb
+cmdunh10 LMRomanDunh10-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmdunh10.pfb
+cminch LMSans10-Bold "enclmrepcmin ReEncodeFont" <lm-rep-cmin.enc <lmssbx10.pfb
+cmitt10 LMMono10-Italic "enclmrepcmitt ReEncodeFont" <lm-rep-cmitt.enc <lmtti10.pfb
+cmr10 LMRoman10-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmr10.pfb
+cmr12 LMRoman12-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmr12.pfb
+cmr17 LMRoman17-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmr17.pfb
+cmr5 LMRoman5-Regular "enclmrepcmsc ReEncodeFont" <lm-rep-cmsc.enc <lmr5.pfb
+cmr6 LMRoman6-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmr6.pfb
+cmr7 LMRoman7-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmr7.pfb
+cmr8 LMRoman8-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmr8.pfb
+cmr9 LMRoman9-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmr9.pfb
+cmsl10 LMRomanSlant10-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmro10.pfb
+cmsl12 LMRomanSlant12-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmro12.pfb
+cmsl8 LMRomanSlant8-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmro8.pfb
+cmsl9 LMRomanSlant9-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmro9.pfb
+cmsltt10 LMMonoSlant10-Regular "enclmrepcmtt ReEncodeFont" <lm-rep-cmtt.enc <lmtto10.pfb
+cmss10 LMSans10-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmss10.pfb
+cmss12 LMSans12-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmss12.pfb
+cmss17 LMSans17-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmss17.pfb
+cmss8 LMSans8-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmss8.pfb
+cmss9 LMSans9-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmss9.pfb
+cmssbx10 LMSans10-Bold "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmssbx10.pfb
+cmssbxo10 LMSans10-BoldOblique "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmssbo10.pfb
+cmssdc10 LMSansDemiCond10-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmssdc10.pfb
+cmssi10 LMSans10-Oblique "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmsso10.pfb
+cmssi12 LMSans12-Oblique "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmsso12.pfb
+cmssi17 LMSans17-Oblique "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmsso17.pfb
+cmssi8 LMSans8-Oblique "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmsso8.pfb
+cmssi9 LMSans9-Oblique "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmsso9.pfb
+cmssq8 LMSansQuot8-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmssq8.pfb
+cmssqi8 LMSansQuot8-Oblique "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmssqo8.pfb
+cmtcsc10 LMMonoCaps10-Regular "enclmrepcmtt ReEncodeFont" <lm-rep-cmtt.enc <lmtcsc10.pfb
+cmti10 LMRoman10-Italic "enclmrepcmit ReEncodeFont" <lm-rep-cmit.enc <lmri10.pfb
+cmti12 LMRoman12-Italic "enclmrepcmit ReEncodeFont" <lm-rep-cmit.enc <lmri12.pfb
+cmti7 LMRoman7-Italic "enclmrepcmit ReEncodeFont" <lm-rep-cmit.enc <lmri7.pfb
+cmti8 LMRoman8-Italic "enclmrepcmit ReEncodeFont" <lm-rep-cmit.enc <lmri8.pfb
+cmti9 LMRoman9-Italic "enclmrepcmit ReEncodeFont" <lm-rep-cmit.enc <lmri9.pfb
+cmtt10 LMMono10-Regular "enclmrepcmtt ReEncodeFont" <lm-rep-cmtt.enc <lmtt10.pfb
+cmtt12 LMMono12-Regular "enclmrepcmtt ReEncodeFont" <lm-rep-cmtt.enc <lmtt12.pfb
+cmtt8 LMMono8-Regular "enclmrepcmtt ReEncodeFont" <lm-rep-cmtt.enc <lmtt8.pfb
+cmtt9 LMMono9-Regular "enclmrepcmtt ReEncodeFont" <lm-rep-cmtt.enc <lmtt9.pfb
+cmu10 LMRomanUnsl10-Regular "enclmrepcmit ReEncodeFont" <lm-rep-cmit.enc <lmu10.pfb
+cmvtt10 LMMonoProp10-Regular "enclmrepcmrm ReEncodeFont" <lm-rep-cmrm.enc <lmvtt10.pfb
+
+cmbsy5 LMMathSymbols5-Bold "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmbsy5.pfb
+cmbsy7 LMMathSymbols7-Bold "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmbsy7.pfb
+cmbsy10 LMMathSymbols10-Bold "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmbsy10.pfb
+cmsy5 LMMathSymbols5-Regular "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmsy5.pfb
+cmsy6 LMMathSymbols6-Regular "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmsy6.pfb
+cmsy7 LMMathSymbols7-Regular "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmsy7.pfb
+cmsy8 LMMathSymbols8-Regular "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmsy8.pfb
+cmsy9 LMMathSymbols9-Regular "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmsy9.pfb
+cmsy10 LMMathSymbols10-Regular "enclmmathsy ReEncodeFont" <lm-mathsy.enc <lmsy10.pfb
+cmex10 LMMathExtension10-Regular "enclmmathex ReEncodeFont" <lm-mathex.enc <lmex10.pfb
+cmmi5 LMMathItalic5-Regular "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi5.pfb
+cmmi6 LMMathItalic6-Regular "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi6.pfb
+cmmi7 LMMathItalic7-Regular "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi7.pfb
+cmmi8 LMMathItalic8-Regular "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi8.pfb
+cmmi9 LMMathItalic9-Regular "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi9.pfb
+cmmi10 LMMathItalic10-Regular "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi10.pfb
+cmmi12 LMMathItalic12-Regular "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmi12.pfb
+cmmib5 LMMathItalic5-Bold "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmib5.pfb
+cmmib7 LMMathItalic7-Bold "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmib7.pfb
+cmmib10 LMMathItalic10-Bold "enclmmathit ReEncodeFont" <lm-mathit.enc <lmmib10.pfb
diff --git a/scripts/context/lua/mtx-epub.lua b/scripts/context/lua/mtx-epub.lua
index 094d589b5..fa5a85134 100644
--- a/scripts/context/lua/mtx-epub.lua
+++ b/scripts/context/lua/mtx-epub.lua
@@ -28,9 +28,39 @@ if not modules then modules = { } end modules ['mtx-epub'] = {
-- Text
-- mimetype
+-- todo:
+--
+-- remove m_k_v_i prefixes
+-- remap fonts %mono% in css so that we can replace
+-- coverpage tests
+-- split up
+
local format, gsub, find = string.format, string.gsub, string.find
-local concat = table.concat
-local replace = utilities.templates.replace
+local concat, sortedhash = table.concat, table.sortedhash
+
+local formatters = string.formatters
+local replacetemplate = utilities.templates.replace
+
+local addsuffix = file.addsuffix
+local nameonly = file.nameonly
+local basename = file.basename
+local joinfile = file.join
+local suffix = file.suffix
+local addsuffix = file.addsuffix
+local removesuffix = file.removesuffix
+local replacesuffix = file.replacesuffix
+
+local copyfile = file.copy
+local removefile = os.remove
+
+local needsupdating = file.needsupdating
+
+local isdir = lfs.isdir
+local isfile = lfs.isfile
+local mkdir = lfs.mkdir
+
+local pushdir = dir.push
+local popdir = dir.pop
local helpinfo = [[
<?xml version="1.0"?>
@@ -38,12 +68,17 @@ local helpinfo = [[
<metadata>
<entry name="name">mtx-epub</entry>
<entry name="detail">ConTeXt EPUB Helpers</entry>
- <entry name="version">1.00</entry>
+ <entry name="version">1.10</entry>
</metadata>
<flags>
<category name="basic">
<subcategory>
<flag name="make"><short>create epub zip file</short></flag>
+ <flag name="purge"><short>remove obsolete files</short></flag>
+ <flag name="rename"><short>rename images to sane names</short></flag>
+ <flag name="svgmath"><short>convert mathml to svg</short></flag>
+ <flag name="svgstyle"><short>use given tex style for svg generation (overloads style in specification)</short></flag>
+ <flag name="all"><short>assume: --purge --rename --svgmath (for fast testing)</short></flag>
</subcategory>
</category>
</flags>
@@ -60,10 +95,12 @@ local helpinfo = [[
local application = logs.application {
name = "mtx-epub",
- banner = "ConTeXt EPUB Helpers 1.00",
+ banner = "ConTeXt EPUB Helpers 1.10",
helpinfo = helpinfo,
}
+local report = application.report
+
-- script code
scripts = scripts or { }
@@ -105,7 +142,7 @@ local t_package = [[
<dc:coverage>%coverage%</dc:coverage>
<dc:rights>%rights%</dc:rights>
-->
- <meta name="cover" content="%firstpage%" />
+ <meta name="cover" content="%coverpage%" />
<meta name="generator" content="ConTeXt MkIV" />
<meta property="dcterms:modified">%date%</meta>
</metadata>
@@ -122,19 +159,20 @@ local t_package = [[
</package>
]]
+
local t_item = [[ <item id="%id%" href="%filename%" media-type="%mime%" />]]
-local t_nav = [[ <item id="%id%" href="%filename%" media-type="%mime%" properties="%properties%" />]]
+local t_prop = [[ <item id="%id%" href="%filename%" media-type="%mime%" properties="%properties%" />]]
-- <!DOCTYPE ncx PUBLIC "-//NISO//DTD ncx 2005-1//EN" "http://www.daisy.org/z3986/2005/ncx-2005-1.dtd">
local t_toc = [[
<?xml version="1.0" encoding="UTF-8"?>
+<!-- this is no longer needed in epub 3.0+ -->
+
<ncx xmlns="http://www.daisy.org/z3986/2005/ncx/" version="2005-1">
<head>
- <meta charset="utf-8" />
-
<meta name="generator" content="ConTeXt MkIV" />
<meta name="dtb:uid" content="%identifier%" />
<meta name="dtb:depth" content="2" />
@@ -215,7 +253,7 @@ local t_coverimg = [[
local function dumbid(filename)
-- return (string.gsub(os.uuid(),"%-%","")) -- to be tested
- return file.nameonly(filename) .. "-" .. file.suffix(filename)
+ return nameonly(filename) .. "-" .. suffix(filename)
end
local mimetypes = {
@@ -237,41 +275,75 @@ local idmakers = {
default = function(filename) return dumbid(filename) end,
}
--- specification = {
--- name = "document",
--- identifier = "123",
--- root = "a.xhtml",
--- files = {
--- "a.xhtml",
--- "b.css",
--- "c.png",
--- }
--- }
-
-local function relocateimages(imagedata,oldname,newname,subpath)
+local function relocateimages(imagedata,oldname,newname,subpath,rename)
local data = io.loaddata(oldname)
- local images = { }
- local done = gsub(data,[[(id=")(.-)(".-background%-image *: * url%()(.-)(%))]], function(s1,id,s2,name,s3)
- local newname = imagedata[id].newname
- if newname then
- if subpath then
- name = file.join(subpath,file.basename(new name))
- else
- name = file.basename(newname)
+ if data then
+ subpath = joinfile("..",subpath)
+ report("relocating images")
+ local n = 0
+ local done = gsub(data,[[(id=")(.-)(".-background%-image *: *url%()(.-)(%))]], function(s1,id,s2,name,s3)
+ local newname = imagedata[id].newname
+ if newname then
+ if subpath then
+ name = joinfile(subpath,basename(newname))
+ else
+ name = basename(newname)
+ end
+ -- name = url.addscheme(name)
end
- -- name = url.addscheme(name)
- end
- images[#images+1] = name
+ if newname then
+ n = n + 1
+ if rename then
+ name = joinfile(subpath,addsuffix(id,suffix(name)))
+ end
+ return s1 .. id .. s2 .. name .. s3
+ end
+ end)
+ report("%s images relocated in %a",n,newname)
if newname then
- return s1 .. id .. s2 .. name .. s3
+ io.savedata(newname,done)
end
- end)
- if newname then
- io.savedata(newname,done)
end
return images
end
+function reportobsolete(oldfiles,newfiles,purge)
+
+ for i=1,#oldfiles do oldfiles[i] = gsub(oldfiles[i],"^[%./]+","") end
+ for i=1,#newfiles do newfiles[i] = gsub(newfiles[i],"^[%./]+","") end
+
+ local old = table.tohash(oldfiles)
+ local new = table.tohash(newfiles)
+ local done = false
+
+ for name in sortedhash(old) do
+ if not new[name] then
+ if not done then
+ report()
+ if purge then
+ report("removing obsolete files:")
+ else
+ report("obsolete files:")
+ end
+ report()
+ done = true
+ end
+ report(" %s",name)
+ if purge then
+ removefile(name)
+ end
+ end
+ end
+
+ if done then
+ report()
+ end
+
+ return done
+
+end
+
+
local zippers = {
{
name = "zip",
@@ -287,246 +359,401 @@ local zippers = {
},
}
-function scripts.epub.make()
+function scripts.epub.make(purge,rename,svgmath,svgstyle)
+
+ -- one can enter a jobname or jobname-export but the simple jobname is
+ -- preferred
local filename = environment.files[1]
if not filename or filename == "" or type(filename) ~= "string" then
- application.report("provide filename")
+ report("provide filename")
return
end
- filename = file.basename(filename)
+ local specpath, specname, specfull
+
+ if isdir(filename) then
+ specpath = filename
+ specname = addsuffix(specpath,"lua")
+ specfull = joinfile(specpath,specname)
+ end
- local specfile = file.replacesuffix(filename,"specification")
+ if not specfull or not isfile(specfull) then
+ specpath = filename .. "-export"
+ specname = addsuffix(filename .. "-pub","lua")
+ specfull = joinfile(specpath,specname)
+ end
- if not lfs.isfile(specfile) then
- application.report("unknown specificaton file %a",specfile)
+ if not specfull or not isfile(specfull) then
+ report("unknown specificaton file for %a",filename)
return
end
- local specification = dofile(specfile)
+ local specification = dofile(specfull)
if not specification or not next(specification) then
- application.report("invalid specificaton file %a",specfile)
+ report("invalid specificaton file %a",specfile)
return
end
+ report("using specification file %a",specfull)
+
-- images: { ... url = location ... }
- local name = specification.name or file.removesuffix(filename)
+ local defaultcoverpage = "cover.xhtml"
+
+ local name = specification.name or nameonly(filename)
local identifier = specification.identifier or ""
- local files = specification.files or { file.addsuffix(filename,"xhtml") }
+ local htmlfiles = specification.htmlfiles or { }
+ local styles = specification.styles or { }
local images = specification.images or { }
- local root = specification.root or files[1]
+ local htmlroot = specification.htmlroot or htmlfiles[1] or ""
local language = specification.language or "en"
- local creator = "context mkiv"
+ local creator = specification.creator or "context mkiv"
local author = specification.author or "anonymous"
local title = specification.title or name
local subtitle = specification.subtitle or ""
- local firstpage = specification.firstpage or ""
- local lastpage = specification.lastpage or ""
local imagefile = specification.imagefile or ""
+ local imagepath = specification.imagepath or "images"
+ local stylepath = specification.stylepath or "styles"
+ local coverpage = specification.firstpage or defaultcoverpage
+
+ if type(svgstyle) == "string" and not svgstyle then
+ svgstyle = specification.svgstyle or ""
+ end
+
+ local obsolete = false
+
+ if #htmlfiles == 0 then
+ report("no html files specified")
+ return
+ end
+ if htmlroot == "" then
+ report("no html root file specified")
+ return
+ end
if subtitle ~= "" then
title = format("%s, %s",title,subtitle)
end
- -- identifier = gsub(identifier,"[^a-zA-z0-9]","")
+ local htmlsource = specpath
+ local imagesource = joinfile(specpath,imagepath)
+ local stylesource = joinfile(specpath,stylepath)
- if firstpage == "" then
- -- firstpage = "firstpage.jpg" -- dummy
- else
- images[firstpage] = firstpage
+ -- once we're here we can start moving files to the right spot; first we deal
+ -- with images
+
+ -- ["image-1"]={
+ -- height = "7.056cm",
+ -- name = "file:///t:/sources/cow.svg",
+ -- page = "1",
+ -- width = "9.701cm",
+ -- }
+
+ -- end of todo
+
+ local pdftosvg = os.which("mudraw") and formatters[ [[mudraw -o "%s" "%s" %s]] ]
+
+ local f_svgname = formatters["%s-page-%s.svg"]
+
+ local notupdated = 0
+ local updated = 0
+ local skipped = 0
+ local oldfiles = dir.glob(file.join(imagesource,"*"))
+ local newfiles = { }
+
+ if not pdftosvg then
+ report("the %a binary is not present","mudraw")
end
- if lastpage == "" then
- -- lastpage = "lastpage.jpg" -- dummy
+
+ -- a coverpage file has to be in the root of the export tree
+
+ if not coverpage then
+ report("no cover page (image) defined")
+ elseif suffix(coverpage) ~= "xhtml" then
+ report("using cover page %a",coverpage)
+ local source = coverpage
+ local target = joinfile(htmlsource,coverpage)
+ htmlfiles[#htmlfiles+1 ] = coverpage
+ report("copying coverpage %a to %a",source,target)
+ copyfile(source,target)
+ elseif isfile(coverpage) then
+ report("using cover page image %a",coverpage)
+ images.cover = {
+ height = "100%",
+ width = "100%",
+ page = "1",
+ name = url.filename(coverpage),
+ used = coverpage,
+ }
+ local data = replacetemplate(t_coverxhtml, {
+ content = replacetemplate(t_coverimg, {
+ image = coverpage,
+ })
+ })
+ coverpage = defaultcoverpage
+ local target = joinfile(htmlsource,coverpage)
+ report("saving coverpage to %a",target)
+ io.savedata(target,data)
+ htmlfiles[#htmlfiles+1 ] = coverpage
else
- images[lastpage] = lastpage
+ report("cover page image %a is not present",coverpage)
+ coverpage = false
+ end
+
+ if not coverpage then
+ local data = replacetemplate(t_coverxhtml, {
+ content = "no cover page"
+ })
+ coverpage = defaultcoverpage
+ local target = joinfile(htmlsource,coverpage)
+ report("saving dummy coverpage to %a",target)
+ io.savedata(target,data)
+ htmlfiles[#htmlfiles+1 ] = coverpage
+ end
+
+ for id, data in sortedhash(images) do
+ local name = url.filename(data.name)
+ local used = url.filename(data.used)
+ local base = basename(used)
+ local page = data.page or "1"
+ -- todo : check timestamp and prefix, rename to image-*
+ if suffix(used) == "pdf" then
+ name = f_svgname(nameonly(name),page)
+ local source = used
+ local target = joinfile(imagesource,name)
+ if needsupdating(source,target) then
+ if pdftosvg then
+ local command = pdftosvg(target,source,page)
+ report("running command %a",command)
+ os.execute(command)
+ updated = updated + 1
+ else
+ skipped = skipped + 1
+ end
+ else
+ notupdated = notupdated + 1
+ end
+ newfiles[#newfiles+1] = target
+ else
+ name = basename(used)
+ local source = used
+ local target = joinfile(imagesource,name)
+ if needsupdating(source,target) then
+ report("copying %a to %a",source,target)
+ copyfile(source,target)
+ updated = updated + 1
+ else
+ notupdated = notupdated + 1
+ -- no message
+ end
+ newfiles[#newfiles+1] = target
+ end
+ local target = newfiles[#newfiles]
+ if suffix(target) == "svg" then
+ local data = io.loaddata(target)
+ data = gsub(data,"<!(DOCTYPE.-)>","<!-- %1 -->",1)
+ io.savedata(target,data)
+ end
+ data.newname = name -- without path
+ end
+
+ report("%s images checked, %s updated, %s kept, %s skipped",updated + notupdated + skipped,updated,notupdated,skipped)
+
+ if reportobsolete(oldfiles,newfiles,purge) then
+ obsolete = true
+ end
+
+ -- here we can decide not to make an epub
+
+ local uuid = format("urn:uuid:%s",os.uuid(true)) -- os.uuid()
+ local identifier = "bookid" -- for now
+
+ local epubname = removesuffix(name)
+ local epubpath = name .. "-epub"
+ local epubfile = replacesuffix(name,"epub")
+ local epubroot = replacesuffix(name,"opf")
+ local epubtoc = "toc.ncx"
+ local epubmimetypes = "mimetype"
+ local epubcontainer = "container.xml"
+ local epubnavigator = "nav.xhtml"
+
+ local metapath = "META-INF"
+ local datapath = "OEBPS"
+
+ local oldfiles = dir.glob(file.join(epubpath,"**/*"))
+ local newfiles = { }
+
+ report("creating paths in tree %a",epubpath)
+
+ if not isdir(epubpath) then
+ mkdir(epubpath)
+ end
+ if not isdir(epubpath) then
+ report("unable to create path %a",epubpath)
+ return
end
- local uuid = format("urn:uuid:%s",os.uuid(true)) -- os.uuid()
+ local metatarget = joinfile(epubpath,metapath)
+ local htmltarget = joinfile(epubpath,datapath)
+ local styletarget = joinfile(epubpath,datapath,stylepath)
+ local imagetarget = joinfile(epubpath,datapath,imagepath)
- identifier = "bookid" -- for now
+ mkdir(metatarget)
+ mkdir(htmltarget)
+ mkdir(styletarget)
+ mkdir(imagetarget)
- local epubname = name
- local epubpath = file.replacesuffix(name,"tree")
- local epubfile = file.replacesuffix(name,"epub")
- local epubroot = file.replacesuffix(name,"opf")
- local epubtoc = "toc.ncx"
- local epubcover = "cover.xhtml"
+ local used = { }
+ local notupdated = 0
+ local updated = 0
- application.report("creating paths in tree %a",epubpath)
- lfs.mkdir(epubpath)
- lfs.mkdir(file.join(epubpath,"META-INF"))
- lfs.mkdir(file.join(epubpath,"OEBPS"))
+ local oldimagespecification = joinfile(htmlsource,imagefile)
+ local newimagespecification = joinfile(htmltarget,imagefile)
- local used = { }
+ report("removing %a",newimagespecification)
+ -- removefile(newimagespecification) -- because we update that one
- local function registerone(filename)
- local suffix = file.suffix(filename)
+ local function registerone(path,filename,mathml)
+ local suffix = suffix(filename)
local mime = mimetypes[suffix]
if mime then
- local idmaker = idmakers[suffix] or idmakers.default
- used[#used+1] = replace(t_item, {
- id = idmaker(filename),
- filename = filename,
- mime = mime,
- } )
+ local idmaker = idmakers[suffix] or idmakers.default
+ local fullname = path and joinfile(path,filename) or filename
+ if mathml then
+ used[#used+1] = replacetemplate(t_prop, {
+ id = idmaker(filename),
+ filename = fullname,
+ mime = mime,
+ properties = "mathml",
+ } )
+ else
+ used[#used+1] = replacetemplate(t_item, {
+ id = idmaker(filename),
+ filename = fullname,
+ mime = mime,
+ } )
+ end
return true
end
end
- local function copyone(filename,alternative)
- if registerone(filename) then
- local target = file.join(epubpath,"OEBPS",file.basename(filename))
- local source = alternative or filename
- file.copy(source,target)
- application.report("copying %a to %a",source,target)
+ local function registerandcopyfile(check,path,name,sourcepath,targetpath,newname)
+ if newname then
+ newname = replacesuffix(newname,suffix(name))
+ else
+ newname = name
end
- end
-
- if lfs.isfile(epubcover) then
- copyone(epubcover)
- epubcover = false
- else
- registerone(epubcover)
- end
-
- copyone("toc.ncx")
-
- local function copythem(files)
- for i=1,#files do
- local filename = files[i]
- if type(filename) == "string" then
- local suffix = file.suffix(filename)
- if suffix == "xhtml" then
- local alternative = file.replacesuffix(filename,"html")
- if lfs.isfile(alternative) then
- copyone(filename,alternative)
- else
- copyone(filename)
- end
- elseif suffix == "css" then
- if filename == "export-example.css" then
- if lfs.isfile(filename) then
- os.remove(filename)
- local original = resolvers.findfile(filename)
- application.report("updating local copy of %a from %a",filename,original)
- file.copy(original,filename)
- else
- filename = resolvers.findfile(filename)
- end
- elseif not lfs.isfile(filename) then
- filename = resolvers.findfile(filename)
- else
- -- use specific local one
- end
- copyone(filename)
+ local source = joinfile(sourcepath,name)
+ local target = joinfile(targetpath,newname)
+ local mathml = false
+ if suffix(source) == "xhtml" then
+ if find(io.loaddata(source),"MathML") then
+ mathml = true -- inbelievable: the property is only valid when there is mathml
+ end
+ end
+ if registerone(path,newname,mathml) then
+ if not check or needsupdating(source,target) or mathml and svgmath then
+ report("copying %a to %a",source,target)
+ copyfile(source,target)
+ updated = updated + 1
+ else
+ notupdated = notupdated + 1
+ end
+ newfiles[#newfiles+1] = target
+ if mathml and svgmath then
+ report()
+ report("converting mathml into svg in %a",target)
+ report()
+ local status, total, unique = moduledata.svgmath.convert(target,svgstyle)
+ report()
+ if status then
+ report("%s formulas converted, %s are unique",total,unique)
else
- copyone(filename)
+ report("warning: %a in %a",total,target)
end
+ report()
end
end
end
- copythem(files)
+ for image, data in sortedhash(images) do
+ registerandcopyfile(true,imagepath,data.newname,imagesource,imagetarget,rename and image)
+ end
+ for i=1,#styles do
+ registerandcopyfile(false,stylepath,styles[i],stylesource,styletarget)
+ end
+ for i=1,#htmlfiles do
+ registerandcopyfile(false,false,htmlfiles[i],htmlsource,htmltarget)
+ end
- -- ["image-1"]={
- -- ["height"]="7.056cm",
- -- ["name"]="file:///t:/sources/cow.svg",
- -- ["page"]="1",
- -- ["width"]="9.701cm",
- -- }
+ relocateimages(images,oldimagespecification,newimagespecification,imagepath,rename)
- local theimages = { }
- local pdftosvg = string.formatters[ [[mudraw -o "%s" "%s" %s]] ]
+ report("%s files registered, %s updated, %s kept",updated + notupdated,updated,notupdated)
- for id, data in table.sortedpairs(images) do
- local name = url.filename(data.name)
- local used = url.filename(data.used)
- local base = file.basename(used)
- local page = data.page or ""
- if file.suffix(used) == "pdf" then
- -- todo : check timestamp and prefix, rename to image-*
- local command = pdftosvg(name,used,page)
- application.report("running command %a\n\n",command)
- os.execute(command)
- else
- name = used
- end
- data.newname = name
- theimages[#theimages+1] = name
+ local function saveinfile(what,name,data)
+ report("saving %s in %a",what,name)
+ io.savedata(name,data)
+ newfiles[#newfiles+1] = name
end
- used[#used+1] = replace(t_nav, {
+ used[#used+1] = replacetemplate(t_prop, {
id = "nav",
- filename = "nav.xhtml",
+ filename = epubnavigator,
properties = "nav",
mime = "application/xhtml+xml",
})
- io.savedata(file.join(epubpath,"OEBPS","nav.xhtml"),replace(t_navtoc, { -- version 3.0
- root = root,
- } ) )
+ registerone(false,epubtoc)
- copythem(theimages)
-
- local idmaker = idmakers[file.suffix(root)] or idmakers.default
+ saveinfile("navigation data",joinfile(htmltarget,epubnavigator),replacetemplate(t_navtoc, { -- version 3.0
+ root = htmlroot,
+ } ) )
- io.savedata(file.join(epubpath,"mimetype"),mimetype)
+ saveinfile("used mimetypes",joinfile(epubpath,epubmimetypes),mimetype)
- io.savedata(file.join(epubpath,"META-INF","container.xml"),replace(t_container, { -- version 2.0
+ saveinfile("version 2.0 container",joinfile(metatarget,epubcontainer),replacetemplate(t_container, {
rootfile = epubroot
} ) )
- io.savedata(file.join(epubpath,"OEBPS",epubroot),replace(t_package, {
+ local idmaker = idmakers[suffix(htmlroot)] or idmakers.default
+
+ saveinfile("package specification",joinfile(htmltarget,epubroot),replacetemplate(t_package, {
identifier = identifier,
title = title,
language = language,
uuid = uuid,
creator = creator,
date = os.date("!%Y-%m-%dT%H:%M:%SZ"),
- firstpage = idmaker(firstpage),
+ coverpage = idmaker(coverpage),
manifest = concat(used,"\n"),
- rootfile = idmaker(root)
+ rootfile = idmaker(htmlroot)
} ) )
-- t_toc is replaced by t_navtoc in >= 3
- io.savedata(file.join(epubpath,"OEBPS",epubtoc), replace(t_toc, {
+ saveinfile("table of contents",joinfile(htmltarget,epubtoc), replacetemplate(t_toc, {
identifier = uuid, -- identifier,
title = title,
author = author,
- root = root,
+ root = htmlroot,
} ) )
- if epubcover then
-
- io.savedata(file.join(epubpath,"OEBPS",epubcover), replace(t_coverxhtml, {
- content = firstpage ~= "" and replace(t_coverimg, { image = firstpage }) or "no cover page defined",
- } ) )
-
- end
-
- if imagefile ~= "" then
- local target = file.join(epubpath,"OEBPS",imagefile)
- application.report("relocating images")
- relocateimages(images,imagefile,target) -- ,file.join(epubpath,"OEBPS"))
- end
+ report("creating archive\n\n")
- application.report("creating archive\n\n")
+ pushdir(epubpath)
- lfs.chdir(epubpath)
- os.remove(epubfile)
+ removefile(epubfile)
local usedzipper = false
local function zipped(zipper)
- local ok = os.execute(format(zipper.uncompressed,epubfile,"mimetype"))
+ local ok = os.execute(format(zipper.uncompressed,epubfile,epubmimetypes))
if ok == 0 then
- os.execute(format(zipper.compressed,epubfile,"META-INF"))
- os.execute(format(zipper.compressed,epubfile,"OEBPS"))
+ os.execute(format(zipper.compressed,epubfile,metapath))
+ os.execute(format(zipper.compressed,epubfile,datapath))
usedzipper = zipper.name
return true
end
@@ -550,32 +777,52 @@ function scripts.epub.make()
end
end
- lfs.chdir("..")
+ popdir()
if usedzipper then
- local treefile = file.join(epubpath,epubfile)
- os.remove(epubfile)
- file.copy(treefile,epubfile)
- if lfs.isfile(epubfile) then
- os.remove(treefile)
+ local treefile = joinfile(epubpath,epubfile)
+ removefile(epubfile)
+ copyfile(treefile,epubfile)
+ if isfile(epubfile) then
+ removefile(treefile)
end
- application.report("epub archive made using %s: %s",usedzipper,epubfile)
+ report("epub archive made using %s: %s",usedzipper,epubfile)
else
local list = { }
for i=1,#zippers do
list[#list+1] = zippers[i].name
end
- application.report("no epub archive made, install one of: % | t",list)
+ report("no epub archive made, install one of: % | t",list)
+ end
+
+ if reportobsolete(oldfiles,newfiles,purge) then
+ obsolete = true
+ end
+
+ if obsolete and not purge then
+ report("use --purge to remove obsolete files")
end
end
--
-if environment.argument("make") then
- scripts.epub.make()
-elseif environment.argument("exporthelp") then
- application.export(environment.argument("exporthelp"),environment.files[1])
+local a_exporthelp = environment.argument("exporthelp")
+local a_make = environment.argument("make")
+local a_all = environment.argument("all")
+local a_purge = a_all or environment.argument("purge")
+local a_rename = a_all or environment.argument("rename")
+local a_svgmath = a_all or environment.argument("svgmath")
+local a_svgstyle = environment.argument("svgstyle")
+
+if a_make and a_svgmath then
+ require("x-math-svg")
+end
+
+if a_make then
+ scripts.epub.make(a_purge,a_rename,a_svgmath,a_svgstyle)
+elseif a_exporthelp then
+ application.export(a_exporthelp,environment.files[1])
else
application.help()
end
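
The reworked scripts.epub.make above no longer takes a flat files list; it reads htmlfiles, styles, images, htmlroot and related fields from the exporter's specification file in <jobname>-export/<jobname>-pub.lua. A minimal sketch of such a specification with made-up values; only the field names and the image-1 entry come from the code and comments above:

    return {
        name       = "mydocument",
        language   = "en",
        title      = "My Document",
        author     = "anonymous",
        firstpage  = "cover.jpg",            -- becomes the cover page
        imagefile  = "mydocument-images.css",
        imagepath  = "images",
        stylepath  = "styles",
        htmlroot   = "mydocument.xhtml",     -- assumed root file name
        htmlfiles  = { "mydocument.xhtml" },
        styles     = { "export-example.css" },
        images     = {
            ["image-1"] = {
                name   = "file:///t:/sources/cow.svg",
                used   = "cow.svg",
                page   = "1",
                width  = "9.701cm",
                height = "7.056cm",
            },
        },
    }

PDF images are rendered to svg with mudraw (when present); other formats are copied into the images subpath.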
diff --git a/scripts/context/lua/mtx-update.lua b/scripts/context/lua/mtx-update.lua
index aedc48041..daf4f5b16 100644
--- a/scripts/context/lua/mtx-update.lua
+++ b/scripts/context/lua/mtx-update.lua
@@ -183,6 +183,8 @@ scripts.update.platforms = {
["linux-64"] = "linux-64",
["linux64"] = "linux-64",
--
+ ["linux-armhf"] = "linux-armhf",
+ --
["freebsd"] = "freebsd",
--
["freebsd-amd64"] = "freebsd-amd64",
diff --git a/scripts/context/lua/mtxrun.lua b/scripts/context/lua/mtxrun.lua
index b169309a6..fcfdc1f17 100644
--- a/scripts/context/lua/mtxrun.lua
+++ b/scripts/context/lua/mtxrun.lua
@@ -14273,7 +14273,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-res"] = package.loaded["data-res"] or true
--- original size: 64139, stripped down to: 44503
+-- original size: 64209, stripped down to: 44562
if not modules then modules={} end modules ['data-res']={
version=1.001,
@@ -15463,6 +15463,9 @@ collect_instance_files=function(filename,askedformat,allresults)
end
end
local function findfiles(filename,filetype,allresults)
+ if not filename or filename=="" then
+ return {}
+ end
local result,status=collect_instance_files(filename,filetype or "",allresults)
if not result or #result==0 then
local lowered=lower(filename)
@@ -16666,7 +16669,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-sch"] = package.loaded["data-sch"] or true
--- original size: 6213, stripped down to: 5160
+-- original size: 6567, stripped down to: 5302
if not modules then modules={} end modules ['data-sch']={
version=1.001,
@@ -16692,8 +16695,13 @@ directives.register("schemes.threshold",function(v) threshold=tonumber(v) or thr
function cleaners.none(specification)
return specification.original
end
-function cleaners.strip(specification)
- return (gsub(specification.original,"[^%a%d%.]+","-"))
+function cleaners.strip(specification)
+ local path,name=file.splitbase(specification.original)
+ if path=="" then
+ return (gsub(name,"[^%a%d%.]+","-"))
+ else
+ return (gsub((gsub(path,"%.","-").."-"..name),"[^%a%d%.]+","-"))
+ end
end
function cleaners.md5(specification)
return file.addsuffix(md5.hex(specification.original),file.suffix(specification.path))
@@ -17577,8 +17585,8 @@ end -- of closure
-- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 724183
--- stripped bytes : 257372
+-- original bytes : 724607
+-- stripped bytes : 257595
-- end library merge
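
The cleaners.strip change above (repeated in the three stub copies of mtxrun that follow) folds the path of a remote file into the cached name: dots in the path become dashes, so only the basename keeps its dots and the cached copy keeps a usable suffix. A standalone sketch of that behaviour; file.splitbase is approximated with a pattern and the sample URL is made up:

    local gsub, match = string.gsub, string.match

    local function strip(original)
        local path, name = match(original, "^(.+)/([^/]+)$") -- rough stand-in for file.splitbase
        if not path or path == "" then
            return (gsub(original, "[^%a%d%.]+", "-"))
        else
            return (gsub((gsub(path, "%.", "-") .. "-" .. name), "[^%a%d%.]+", "-"))
        end
    end

    print(strip("http://www.pragma-ade.com/show-gra.pdf"))
    -- old cleaner : http-www.pragma-ade.com-show-gra.pdf
    -- new cleaner : http-www-pragma-ade-com-show-gra.pdf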
diff --git a/scripts/context/stubs/mswin/mtxrun.lua b/scripts/context/stubs/mswin/mtxrun.lua
index b169309a6..fcfdc1f17 100644
--- a/scripts/context/stubs/mswin/mtxrun.lua
+++ b/scripts/context/stubs/mswin/mtxrun.lua
@@ -14273,7 +14273,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-res"] = package.loaded["data-res"] or true
--- original size: 64139, stripped down to: 44503
+-- original size: 64209, stripped down to: 44562
if not modules then modules={} end modules ['data-res']={
version=1.001,
@@ -15463,6 +15463,9 @@ collect_instance_files=function(filename,askedformat,allresults)
end
end
local function findfiles(filename,filetype,allresults)
+ if not filename or filename=="" then
+ return {}
+ end
local result,status=collect_instance_files(filename,filetype or "",allresults)
if not result or #result==0 then
local lowered=lower(filename)
@@ -16666,7 +16669,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-sch"] = package.loaded["data-sch"] or true
--- original size: 6213, stripped down to: 5160
+-- original size: 6567, stripped down to: 5302
if not modules then modules={} end modules ['data-sch']={
version=1.001,
@@ -16692,8 +16695,13 @@ directives.register("schemes.threshold",function(v) threshold=tonumber(v) or thr
function cleaners.none(specification)
return specification.original
end
-function cleaners.strip(specification)
- return (gsub(specification.original,"[^%a%d%.]+","-"))
+function cleaners.strip(specification)
+ local path,name=file.splitbase(specification.original)
+ if path=="" then
+ return (gsub(name,"[^%a%d%.]+","-"))
+ else
+ return (gsub((gsub(path,"%.","-").."-"..name),"[^%a%d%.]+","-"))
+ end
end
function cleaners.md5(specification)
return file.addsuffix(md5.hex(specification.original),file.suffix(specification.path))
@@ -17577,8 +17585,8 @@ end -- of closure
-- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 724183
--- stripped bytes : 257372
+-- original bytes : 724607
+-- stripped bytes : 257595
-- end library merge
diff --git a/scripts/context/stubs/unix/mtxrun b/scripts/context/stubs/unix/mtxrun
index b169309a6..fcfdc1f17 100755
--- a/scripts/context/stubs/unix/mtxrun
+++ b/scripts/context/stubs/unix/mtxrun
@@ -14273,7 +14273,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-res"] = package.loaded["data-res"] or true
--- original size: 64139, stripped down to: 44503
+-- original size: 64209, stripped down to: 44562
if not modules then modules={} end modules ['data-res']={
version=1.001,
@@ -15463,6 +15463,9 @@ collect_instance_files=function(filename,askedformat,allresults)
end
end
local function findfiles(filename,filetype,allresults)
+ if not filename or filename=="" then
+ return {}
+ end
local result,status=collect_instance_files(filename,filetype or "",allresults)
if not result or #result==0 then
local lowered=lower(filename)
@@ -16666,7 +16669,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-sch"] = package.loaded["data-sch"] or true
--- original size: 6213, stripped down to: 5160
+-- original size: 6567, stripped down to: 5302
if not modules then modules={} end modules ['data-sch']={
version=1.001,
@@ -16692,8 +16695,13 @@ directives.register("schemes.threshold",function(v) threshold=tonumber(v) or thr
function cleaners.none(specification)
return specification.original
end
-function cleaners.strip(specification)
- return (gsub(specification.original,"[^%a%d%.]+","-"))
+function cleaners.strip(specification)
+ local path,name=file.splitbase(specification.original)
+ if path=="" then
+ return (gsub(name,"[^%a%d%.]+","-"))
+ else
+ return (gsub((gsub(path,"%.","-").."-"..name),"[^%a%d%.]+","-"))
+ end
end
function cleaners.md5(specification)
return file.addsuffix(md5.hex(specification.original),file.suffix(specification.path))
@@ -17577,8 +17585,8 @@ end -- of closure
-- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 724183
--- stripped bytes : 257372
+-- original bytes : 724607
+-- stripped bytes : 257595
-- end library merge
diff --git a/scripts/context/stubs/win64/mtxrun.lua b/scripts/context/stubs/win64/mtxrun.lua
index b169309a6..fcfdc1f17 100644
--- a/scripts/context/stubs/win64/mtxrun.lua
+++ b/scripts/context/stubs/win64/mtxrun.lua
@@ -14273,7 +14273,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-res"] = package.loaded["data-res"] or true
--- original size: 64139, stripped down to: 44503
+-- original size: 64209, stripped down to: 44562
if not modules then modules={} end modules ['data-res']={
version=1.001,
@@ -15463,6 +15463,9 @@ collect_instance_files=function(filename,askedformat,allresults)
end
end
local function findfiles(filename,filetype,allresults)
+ if not filename or filename=="" then
+ return {}
+ end
local result,status=collect_instance_files(filename,filetype or "",allresults)
if not result or #result==0 then
local lowered=lower(filename)
@@ -16666,7 +16669,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-sch"] = package.loaded["data-sch"] or true
--- original size: 6213, stripped down to: 5160
+-- original size: 6567, stripped down to: 5302
if not modules then modules={} end modules ['data-sch']={
version=1.001,
@@ -16692,8 +16695,13 @@ directives.register("schemes.threshold",function(v) threshold=tonumber(v) or thr
function cleaners.none(specification)
return specification.original
end
-function cleaners.strip(specification)
- return (gsub(specification.original,"[^%a%d%.]+","-"))
+function cleaners.strip(specification)
+ local path,name=file.splitbase(specification.original)
+ if path=="" then
+ return (gsub(name,"[^%a%d%.]+","-"))
+ else
+ return (gsub((gsub(path,"%.","-").."-"..name),"[^%a%d%.]+","-"))
+ end
end
function cleaners.md5(specification)
return file.addsuffix(md5.hex(specification.original),file.suffix(specification.path))
@@ -17577,8 +17585,8 @@ end -- of closure
-- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 724183
--- stripped bytes : 257372
+-- original bytes : 724607
+-- stripped bytes : 257595
-- end library merge
diff --git a/tex/context/base/back-exp.lua b/tex/context/base/back-exp.lua
index 74640ad08..e64b7b77c 100644
--- a/tex/context/base/back-exp.lua
+++ b/tex/context/base/back-exp.lua
@@ -28,6 +28,10 @@ if not modules then modules = { } end modules ['back-exp'] = {
-- todo: move critical formatters out of functions
-- todo: delay loading (apart from basic tag stuff)
+-- problem : too many local variables
+
+-- check setting __i__
+
local next, type, tonumber = next, type, tonumber
local format, concat, sub, gsub = string.format, table.concat, string.sub, string.gsub
local validstring = string.valid
@@ -42,6 +46,7 @@ local replacetemplate = utilities.templates.replace
local trace_export = false trackers.register ("export.trace", function(v) trace_export = v end)
local trace_spacing = false trackers.register ("export.trace.spacing", function(v) trace_spacing = v end)
+
local less_state = false directives.register("export.lessstate", function(v) less_state = v end)
local show_comment = true directives.register("export.comment", function(v) show_comment = v end)
@@ -233,30 +238,35 @@ local namespaced = {
}
local namespaces = {
- msubsup = "m",
- msub = "m",
- msup = "m",
- mn = "m",
- mi = "m",
- ms = "m",
- mo = "m",
- mtext = "m",
- mrow = "m",
- mfrac = "m",
- mroot = "m",
- msqrt = "m",
- munderover = "m",
- munder = "m",
- mover = "m",
- merror = "m",
- math = "m",
- mrow = "m",
- mtable = "m",
- mtr = "m",
- mtd = "m",
- mfenced = "m",
- maction = "m",
- mspace = "m",
+ msubsup = "m",
+ msub = "m",
+ msup = "m",
+ mn = "m",
+ mi = "m",
+ ms = "m",
+ mo = "m",
+ mtext = "m",
+ mrow = "m",
+ mfrac = "m",
+ mroot = "m",
+ msqrt = "m",
+ munderover = "m",
+ munder = "m",
+ mover = "m",
+ merror = "m",
+ math = "m",
+ mrow = "m",
+ mtable = "m",
+ mtr = "m",
+ mtd = "m",
+ mfenced = "m",
+ maction = "m",
+ mspace = "m",
+ -- only when testing
+ mstacker = "m",
+ mstackertop = "m",
+ mstackermid = "m",
+ mstackerbot = "m",
}
setmetatableindex(namespaced, function(t,k)
@@ -356,8 +366,8 @@ local styletemplate = [[
color : %color% ;
}]]
- function wrapups.allusedstyles(xmlfile)
- local result = { formatters["/* %s for file %s */"]("styles",xmlfile) }
+ function wrapups.allusedstyles(basename)
+ local result = { formatters["/* %s for file %s */"]("styles",basename) }
--
local bodyfont = finetuning.bodyfont
local width = finetuning.width
@@ -439,11 +449,10 @@ local imagetemplate = [[
end
end
- local f_images = formatters["/* %s for file %s */"]
local collected = { }
- function wrapups.allusedimages(xmlfile)
- local result = { f_images("images",xmlfile) }
+ function wrapups.allusedimages(basename)
+ local result = { formatters["/* %s for file %s */"]("images",basename) }
for element, details in sortedhash(usedimages) do
for detail, data in sortedhash(details) do
local name = data.name
@@ -1053,8 +1062,44 @@ do
element = "mtext",
data = { content = "" },
nature = "inline",
+ comment = "dummy nucleus"
}
+ local function accentchar(d)
+ for i=1,3 do
+ d = d.data
+ if not d then
+ return
+ end
+ d = d[1]
+ if not d then
+ return
+ end
+ local tg = d.tg
+ if tg == "mover" then
+ local p = properties[d.fulltag]
+ local t = p.top
+ if t then
+ d = d.data[1]
+ local d1 = d.data[1]
+ d1.content = utfchar(t)
+ d.data = { d1 }
+ return d
+ end
+ elseif tg == "munder" then
+ local p = properties[d.fulltag]
+ local b = p.bottom
+ if b then
+ d = d.data[1]
+ local d1 = d.data[1]
+ d1.content = utfchar(b)
+ d.data = { d1 }
+ return d
+ end
+ end
+ end
+ end
+
local function checkmath(root) -- we can provide utf.toentities as an option
local data = root.data
if data then
@@ -1093,33 +1138,13 @@ do
-- data[1] = dummy_nucleus
-- end
elseif roottg == "mfenced" then
- local new, n = { }, 0
- local attributes = { }
- root.attributes = attributes
- for i=1,ndata do
- local di = data[i]
- if not di then
- -- weird
- elseif di.content then
- n = n + 1
- new[n] = di
- else
- local tg = di.tg
- if tg == "mleft" then
- attributes.left = tostring(di.data[1].data[1].content)
- elseif tg == "mmiddle" then
- attributes.middle = tostring(di.data[1].data[1].content)
- elseif tg == "mright" then
- attributes.right = tostring(di.data[1].data[1].content)
- else
- n = n + 1
- di.__i__ = n
- new[n] = di
- end
- end
- end
- root.data = new
- ndata = n
+ local p = properties[root.fulltag]
+ local l, m, r = p.left, p.middle, p.right
+ root.attributes = {
+ left = l and utfchar(l),
+ middle = m and utfchar(m),
+ right = r and utfchar(r),
+ }
end
if ndata == 0 then
return
@@ -1156,9 +1181,122 @@ do
di.skip = "comment"
checkmath(di)
i = i + 1
- elseif tg == "mover" or tg == "munder" or tg == "munderover" then
+ elseif tg == "mover" then
+ if detail == "accent" then
+ local p = properties[di.fulltag]
+ local t = p.top
+ local d = di.data
+ -- todo: accent = "false" (for scripts like limits)
+ di.attributes = {
+ accent = "true",
+ }
+ -- todo: p.topfixed
+ di.detail = nil
+ if t then
+ -- mover
+ d[1].data[1].content = utfchar(t)
+ di.data = { d[2], d[1] }
+ end
+ else
+ -- can't happen
+ end
+ checkmath(di)
+ i = i + 1
+ elseif tg == "munder" then
if detail == "accent" then
- di.attributes = { accent = "true" }
+ local p = properties[di.fulltag]
+ local b = p.bottom
+ local d = di.data
+ -- todo: accent = "false" (for scripts like limits)
+ di.attributes = {
+ accent = "true",
+ }
+ -- todo: p.bottomfixed
+ di.detail = nil
+ if b then
+ -- munder
+ d[2].data[1].content = utfchar(b)
+ end
+ else
+ -- can't happen
+ end
+ checkmath(di)
+ i = i + 1
+ elseif tg == "munderover" then
+ if detail == "accent" then
+ local p = properties[di.fulltag]
+ local t = p.top
+ local b = p.bottom
+ local d = di.data
+ -- todo: accent = "false" (for scripts like limits)
+ -- todo: accentunder = "false" (for scripts like limits)
+ di.attributes = {
+ accent = "true",
+ accentunder = "true",
+ }
+ -- todo: p.topfixed
+ -- todo: p.bottomfixed
+ di.detail = nil
+ if t and b then
+ -- munderover
+ d[1].data[1].content = utfchar(t)
+ d[3].data[1].content = utfchar(b)
+ di.data = { d[2], d[3], d[1] }
+ else
+ -- can't happen
+ end
+ else
+ -- can't happen
+ end
+ checkmath(di)
+ i = i + 1
+ elseif tg == "mstacker" then
+ local d = di.data
+ local d1 = d[1]
+ local d2 = d[2]
+ local d3 = d[3]
+ local t1 = d1 and d1.tg
+ local t2 = d2 and d2.tg
+ local t3 = d3 and d3.tg
+ local m = nil -- d1.data[1]
+ local t = nil
+ local b = nil
+ -- only accent when top / bot have stretch
+ if t1 == "mstackermid" then
+ m = accentchar(d1) -- or m
+ if t2 == "mstackertop" then
+ if t3 == "mstackerbot" then
+ t = accentchar(d2)
+ b = accentchar(d3)
+ di.element = "munderover"
+ di.data = { m or d1.data[1], b or d3.data[1], t or d2.data[1] }
+ else
+ t = accentchar(d2)
+ di.element = "mover"
+ di.data = { m or d1.data[1], t or d2.data[1] }
+ end
+ elseif t2 == "mstackerbot" then
+ if t3 == "mstackertop" then
+ b = accentchar(d2)
+ t = accentchar(d3)
+ di.element = "munderover"
+ di.data = { m or d1.data[1], b or d2.data[1], t or d3.data[1] }
+ else
+ b = accentchar(d2)
+ di.element = "munder"
+ di.data = { m or d1.data[1], b or d2.data[1] }
+ end
+ else
+ -- can't happen
+ end
+ else
+ -- can't happen
+ end
+ if t or b then
+ di.attributes = {
+ accent = t and "true" or nil,
+ accentunder = b and "true" or nil,
+ }
di.detail = nil
end
checkmath(di)
@@ -1173,7 +1311,34 @@ do
elseif tg == "break" then
di.skip = "comment"
i = i + 1
- elseif tg == "mrow" and detail then
+ elseif tg == "mtext" then
+ -- this is only needed for unboxed mtexts ... all kinds of special
+ -- tex border cases and optimizations ... trial and error
+ local data = di.data
+ if #data > 1 then
+ for i=1,#data do
+ local di = data[i]
+ local content = di.content
+ if content then
+ data[i] = {
+ element = "mtext",
+ nature = "inline",
+ data = { di },
+ n = 0,
+ }
+ elseif di.tg == "math" then
+ local di = di.data[1]
+ data[i] = di
+ checkmath(di)
+ end
+ end
+ di.element = "mrow"
+ -- di.tg = "mrow"
+ -- di.nature = "inline"
+ end
+ checkmath(di)
+ i = i + 1
+ elseif tg == "mrow" and detail then -- hm, falls through
di.detail = nil
checkmath(di)
di = {
@@ -1272,21 +1437,47 @@ do
local ndata = #data
local n = 0
for i=1,ndata do
- local di = data[i]
- if di and not di.content then
- di = stripmath(di)
+ local d = data[i]
+ if d and not d.content then
+ d = stripmath(d)
end
- if di then
- local content = di.content
+ if d then
+ local content = d.content
if not content then
n = n + 1
- di.__i__ = n
- data[n] = di
+ d.__i__ = n
+ data[n] = d
elseif content == " " or content == "" then
- -- skip
+ if di.tg == "mspace" then
+ -- we append or prepend a space to a preceding or following mtext
+ local parent = di.__p__
+ local index = di.__i__ -- == i
+ local data = parent.data
+ if index > 1 then
+ local d = data[index-1]
+ if d.tg == "mtext" then
+ local dd = d.data
+ local dn = dd[#dd]
+ local dc = dn.content
+ if dc then
+ dn.content = dc .. content
+ end
+ end
+ elseif index < ndata then
+ local d = data[index+1]
+ if d.tg == "mtext" then
+ local dd = d.data
+ local dn = dd[1]
+ local dc = dn.content
+ if dc then
+ dn.content = content .. dc
+ end
+ end
+ end
+ end
else
n = n + 1
- data[n] = di
+ data[n] = d
end
end
end
@@ -1296,7 +1487,16 @@ do
if #data > 0 then
return di
end
+-- end
end
+ -- could be integrated but is messy then
+-- while roottg == "mrow" and #data == 1 do
+-- data = data[1]
+-- for k, v in next, data do
+-- root[k] = v
+-- end
+-- roottg = data.tg
+-- end
end
end
@@ -1795,46 +1995,50 @@ do
local di = data[i]
if not di then -- hm, di can be string
-- whatever
- elseif di.content then
- -- already has breaks
- local content = lpegmatch(p_entity,di.content)
- if i == nofdata and sub(content,-1) == "\n" then -- move check
- -- can be an end of line in par but can also be the last line
- if trace_spacing then
- result[#result+1] = f_spacing(di.parnumber or 0,sub(content,1,-2))
+ else
+ local content = di.content
+-- also optimize for content == "" : trace that first
+ if content then
+ -- already has breaks
+ local content = lpegmatch(p_entity,content)
+ if i == nofdata and sub(content,-1) == "\n" then -- move check
+ -- can be an end of line in par but can also be the last line
+ if trace_spacing then
+ result[#result+1] = f_spacing(di.parnumber or 0,sub(content,1,-2))
+ else
+ result[#result+1] = sub(content,1,-2)
+ end
+ result[#result+1] = " "
else
- result[#result+1] = sub(content,1,-2)
+ if trace_spacing then
+ result[#result+1] = f_spacing(di.parnumber or 0,content)
+ else
+ result[#result+1] = content
+ end
end
- result[#result+1] = " "
- else
- if trace_spacing then
- result[#result+1] = f_spacing(di.parnumber or 0,content)
+ elseif not di.collapsed then -- ignore collapsed data (is appended, reconstructed par)
+ local element = di.element
+ if not element then
+ -- skip
+ elseif element == "break" then -- or element == "pagebreak"
+ emptytag(result,element,nature,di)
+ elseif element == "" or di.skip == "ignore" then
+ -- skip
else
- result[#result+1] = content
- end
- end
- elseif not di.collapsed then -- ignore collapsed data (is appended, reconstructed par)
- local element = di.element
- if not element then
- -- skip
- elseif element == "break" then -- or element == "pagebreak"
- emptytag(result,element,nature,di)
- elseif element == "" or di.skip == "ignore" then
- -- skip
- else
- if di.before then
- flushtree(result,di.before,nature)
- end
- local natu = di.nature
- local skip = di.skip
- if di.breaknode then
- emptytag(result,"break","display",di)
- end
- begintag(result,element,natu,di,skip)
- flushtree(result,di.data,natu)
- endtag(result,element,natu,di,skip)
- if di.after then
- flushtree(result,di.after,nature)
+ if di.before then
+ flushtree(result,di.before,nature)
+ end
+ local natu = di.nature
+ local skip = di.skip
+ if di.breaknode then
+ emptytag(result,"break","display",di)
+ end
+ begintag(result,element,natu,di,skip)
+ flushtree(result,di.data,natu)
+ endtag(result,element,natu,di,skip)
+ if di.after then
+ flushtree(result,di.after,nature)
+ end
end
end
end
@@ -2222,12 +2426,13 @@ end
-- whatsit_code localpar_code
-local function collectresults(head,list) -- is last used (we also have currentattribute)
+local function collectresults(head,list,pat,pap) -- is last used (we also have currentattribute)
local p
for n in traverse_nodes(head) do
local id = getid(n) -- 14: image, 8: literal (mp)
if id == glyph_code then
local at = getattr(n,a_tagged)
+or pat
if not at then
-- we need to tag the pagebody stuff as being valid skippable
--
@@ -2236,7 +2441,7 @@ local function collectresults(head,list) -- is last used (we also have currentat
-- we could add tonunicodes for ligatures (todo)
local components = getfield(n,"components")
if components then -- we lose data
- collectresults(components,nil)
+ collectresults(components,nil,at) -- this assumes that components have the same attribute as the glyph ... we should be more tolerant (see math)
else
local c = getchar(n)
if last ~= at then
@@ -2244,6 +2449,7 @@ local function collectresults(head,list) -- is last used (we also have currentat
pushcontent()
currentnesting = tl
currentparagraph = getattr(n,a_taggedpar)
+or pap
currentattribute = at
last = at
pushentry(currentnesting)
@@ -2262,6 +2468,7 @@ local function collectresults(head,list) -- is last used (we also have currentat
-- information unless we inject a special node (but even then we can run into nesting
-- issues)
local ap = getattr(n,a_taggedpar)
+or pap
if ap ~= currentparagraph then
pushcontent(currentparagraph,ap)
pushentry(currentnesting)
@@ -2338,6 +2545,7 @@ local function collectresults(head,list) -- is last used (we also have currentat
-- skip this one ... already converted special character (node-acc)
elseif ca then
local a = getattr(n,a_tagged)
+or pat
if a then
local c = specialspaces[ca]
if last ~= a then
@@ -2348,12 +2556,14 @@ local function collectresults(head,list) -- is last used (we also have currentat
pushcontent()
currentnesting = tl
currentparagraph = getattr(n,a_taggedpar)
+or pap
currentattribute = a
last = a
pushentry(currentnesting)
-- no reference check (see above)
elseif last then
local ap = getattr(n,a_taggedpar)
+or pap
if ap ~= currentparagraph then
pushcontent(currentparagraph,ap)
pushentry(currentnesting)
@@ -2376,9 +2586,11 @@ local function collectresults(head,list) -- is last used (we also have currentat
else
local subtype = getsubtype(n)
if subtype == userskip_code then
- if getfield(getfield(n,"spec"),"width") > threshold then
+ local spec = getfield(n,"spec")
+ if getfield(spec,"width") > threshold then
if last and not somespace[currentcontent[nofcurrentcontent]] then
local a = getattr(n,a_tagged)
+or pat
if a == last then
if trace_export then
report_export("%w<!-- injecting spacing 5a -->",currentdepth)
@@ -2406,6 +2618,7 @@ local function collectresults(head,list) -- is last used (we also have currentat
elseif subtype == spaceskip_code or subtype == xspaceskip_code then
if not somespace[currentcontent[nofcurrentcontent]] then
local a = getattr(n,a_tagged)
+or pat
if a == last then
if trace_export then
report_export("%w<!-- injecting spacing 7 (stay in element) -->",currentdepth)
@@ -2435,6 +2648,7 @@ local function collectresults(head,list) -- is last used (we also have currentat
end
elseif not somespace[r] then
local a = getattr(n,a_tagged)
+or pat
if a == last then
if trace_export then
report_export("%w<!-- injecting spacing 1 (end of line, stay in element) -->",currentdepth)
@@ -2465,6 +2679,7 @@ local function collectresults(head,list) -- is last used (we also have currentat
local ai = getattr(n,a_image)
if ai then
local at = getattr(n,a_tagged)
+or pat
if nofcurrentcontent > 0 then
pushcontent()
pushentry(currentnesting) -- ??
@@ -2479,7 +2694,9 @@ local function collectresults(head,list) -- is last used (we also have currentat
-- we need to determine an end-of-line
local list = getlist(n)
if list then
- collectresults(list,n)
+local at = getattr(n,a_tagged)
+or pat
+ collectresults(list,n,at)
end
end
elseif id == kern_code then
@@ -2492,6 +2709,7 @@ local function collectresults(head,list) -- is last used (we also have currentat
if kern > limit then
if last and not somespace[currentcontent[nofcurrentcontent]] then
local a = getattr(n,a_tagged)
+or pat
if a == last then
if not somespace[currentcontent[nofcurrentcontent]] then
if trace_export then
@@ -2531,6 +2749,19 @@ function nodes.handlers.export(head) -- hooks into the page builder
end
-- continueexport()
restart = true
+
+-- local function f(head,depth,pat)
+-- for n in node.traverse(head) do
+-- local a = n[a_tagged] or pat
+-- local t = taglist[a]
+-- print(depth,n,a,t and table.concat(t," "))
+-- if n.id == hlist_code or n.id == vlist_code and n.list then
+-- f(n.list,depth+1,a)
+-- end
+-- end
+-- end
+-- f(head,1)
+
collectresults(tonut(head))
if trace_export then
report_export("%w<!-- stop flushing page -->",currentdepth)
@@ -2578,31 +2809,35 @@ local f_cssheadlink = formatters [ [[
<link type="text/css" rel="stylesheet" href="%s"/>
]] ]
- local function allusedstylesheets(xmlfile,cssfiles,files)
+ local function allusedstylesheets(cssfiles,files,path)
+ local done = { }
local result = { }
local extras = { }
for i=1,#cssfiles do
local cssfile = cssfiles[i]
- if type(cssfile) ~= "string" or cssfile == v_yes or cssfile == "" or cssfile == xmlfile then
- cssfile = file.replacesuffix(xmlfile,"css")
- else
- cssfile = file.addsuffix(cssfile,"css")
+ if type(cssfile) ~= "string" then
+ -- error
+ elseif cssfile == "export-example.css" then
+ -- ignore
+ elseif not done[cssfile] then
+ cssfile = file.join(path,cssfile)
+ report_export("adding css reference '%s'",cssfile)
+ files[#files+1] = cssfile
+ result[#result+1] = f_csspreamble(cssfile)
+ extras[#extras+1] = f_cssheadlink(cssfile)
+ done[cssfile] = true
end
- files[#files+1] = cssfile
- report_export("adding css reference '%s'",cssfile)
- result[#result+1] = f_csspreamble(cssfile)
- extras[#extras+1] = f_cssheadlink(cssfile)
end
return concat(result), concat(extras)
end
local f_e_template = [[
-%element% {
+%element%, div.%element% {
display: %display% ;
}]]
local f_d_template = [[
-%element%[detail=%detail%], div.detail-%detail% {
+%element%[detail=%detail%], div.%element%.detail-%detail% {
display: %display% ;
}]]
@@ -2640,23 +2875,27 @@ local htmltemplate = [[
mixed = "inline",
}
- local function allusedelements(xmlfile)
- local result = { formatters["/* %s for file %s */"]("template",xmlfile) }
+ local function allusedelements(basename)
+ local result = { formatters["/* %s for file %s */"]("template",basename) }
for element, details in sortedhash(used) do
- result[#result+1] = f_category(element)
- for detail, nature in sortedhash(details) do
- local display = displaymapping[nature or "display"] or "block"
- if detail == "" then
- result[#result+1] = replacetemplate(f_e_template, {
- element = element,
- display = display,
- })
- else
- result[#result+1] = replacetemplate(f_d_template, {
- element = element,
- detail = detail,
- display = display,
- })
+ if namespaces[element] then
+ -- skip math
+ else
+ result[#result+1] = f_category(element)
+ for detail, nature in sortedhash(details) do
+ local display = displaymapping[nature or "display"] or "block"
+ if detail == "" then
+ result[#result+1] = replacetemplate(f_e_template, {
+ element = element,
+ display = display,
+ })
+ else
+ result[#result+1] = replacetemplate(f_d_template, {
+ element = element,
+ detail = detail,
+ display = display,
+ })
+ end
end
end
end
@@ -2771,8 +3010,11 @@ local htmltemplate = [[
--
}
- local addclicks = true
- local f_onclick = formatters[ [[location.href='%s']] ]
+ local addclicks = true
+ local f_onclick = formatters[ [[location.href='%s']] ]
+
+ local p_cleanid = lpeg.replacer { [":"] = "-" }
+ local p_cleanhref = lpeg.Cs(lpeg.P("#") * p_cleanid)
local function remap(specification,source,target)
local comment = nil -- share comments
@@ -2781,8 +3023,10 @@ local htmltemplate = [[
local tg = c.tg
local ns = c.ns
if ns == "m" then
+if false then
c.ns = ""
c.at["xmlns:m"] = nil
+end
-- elseif tg == "a" then
-- c.ns = ""
else
@@ -2813,7 +3057,9 @@ local htmltemplate = [[
local href = at.href
local class = concat(class," ")
if id then
+ id = lpegmatch(p_cleanid, id) or id
if href then
+ href = lpegmatch(p_cleanhref,href) or href
c.at = {
class = class,
id = id,
@@ -2828,6 +3074,7 @@ local htmltemplate = [[
end
else
if href then
+ href = lpegmatch(p_cleanhref,href) or href
c.at = {
class = class,
href = href,
@@ -2845,10 +3092,7 @@ local htmltemplate = [[
end
end
- local cssfile, xhtmlfile = nil, nil
-
- directives.register("backend.export.css", function(v) cssfile = v end)
- directives.register("backend.export.xhtml", function(v) xhtmlfile = v end)
+ local cssfile = nil directives.register("backend.export.css", function(v) cssfile = v end)
local function stopexport(v)
starttiming(treehash)
@@ -2856,11 +3100,7 @@ local htmltemplate = [[
finishexport()
--
report_export("")
- if xhtmlfile then
- report_export("exporting xml, xhtml and html files")
- else
- report_export("exporting xml file")
- end
+ report_export("exporting xml, xhtml and html files")
report_export("")
--
wrapups.collapsetree(tree)
@@ -2874,101 +3114,190 @@ local htmltemplate = [[
if type(v) ~= "string" or v == v_yes or v == "" then
v = tex.jobname
end
- local basename = file.basename(v)
- local xmlfile = file.addsuffix(basename,"export")
- --
- local imagefilename = file.addsuffix(file.removesuffix(xmlfile) .. "-images","css")
- local stylefilename = file.addsuffix(file.removesuffix(xmlfile) .. "-styles","css")
- local templatefilename = file.replacesuffix(xmlfile,"template")
- local specificationfilename = file.replacesuffix(xmlfile,"specification")
+
+ -- we use a dedicated subpath:
--
- if xhtml and not cssfile then
- cssfile = true
- end
- local cssfiles = { }
- if cssfile then
- if cssfile == true then
- cssfiles = { "export-example.css" }
+ -- ./jobname-export
+ -- ./jobname-export/images
+ -- ./jobname-export/styles
+ -- ./jobname-export/jobname-export.xml
+ -- ./jobname-export/jobname-export.xhtml
+ -- ./jobname-export/jobname-export.html
+ -- ./jobname-export/jobname-specification.lua
+ -- ./jobname-export/styles/jobname-defaults.css
+ -- ./jobname-export/styles/jobname-styles.css
+ -- ./jobname-export/styles/jobname-images.css
+ -- ./jobname-export/styles/jobname-templates.css
+
+ local basename = file.basename(v)
+ local corename = file.removesuffix(basename)
+ local basepath = basename .. "-export"
+ local imagepath = file.join(basepath,"images")
+ local stylepath = file.join(basepath,"styles")
+
+ local function validpath(what,pathname)
+ if lfs.isdir(pathname) then
+ report_export("using exiting %s path %a",what,pathname)
+ return pathname
+ end
+ lfs.mkdir(pathname)
+ if lfs.isdir(pathname) then
+ report_export("using cretated %s path %a",what,basepath)
+ return pathname
else
- cssfiles = settings_to_array(cssfile or "")
+ report_export("unable to create %s path %a",what,basepath)
+ return false
end
- insert(cssfiles,1,imagefilename)
- insert(cssfiles,1,stylefilename)
end
- cssfiles = table.unique(cssfiles)
+
+ if not (validpath("export",basepath) and validpath("images",imagepath) and validpath("styles",stylepath)) then
+ return
+ end
+
+ -- we're now on the dedicated export subpath so filenames can't clash
+
+ local xmlfilebase = file.addsuffix(basename .. "-raw","xml" )
+ local xhtmlfilebase = file.addsuffix(basename .. "-tag","xhtml")
+ local htmlfilebase = file.addsuffix(basename .. "-div","xhtml")
+ local specificationfilebase = file.addsuffix(basename .. "-pub","lua" )
+
+ local xmlfilename = file.join(basepath, xmlfilebase )
+ local xhtmlfilename = file.join(basepath, xhtmlfilebase )
+ local htmlfilename = file.join(basepath, htmlfilebase )
+ local specificationfilename = file.join(basepath, specificationfilebase)
--
- local result = allcontent(tree) -- also does some housekeeping and data collecting
+ local defaultfilebase = file.addsuffix(basename .. "-defaults", "css")
+ local imagefilebase = file.addsuffix(basename .. "-images", "css")
+ local stylefilebase = file.addsuffix(basename .. "-styles", "css")
+ local templatefilebase = file.addsuffix(basename .. "-templates","css")
--
+ local defaultfilename = file.join(stylepath,defaultfilebase )
+ local imagefilename = file.join(stylepath,imagefilebase )
+ local stylefilename = file.join(stylepath,stylefilebase )
+ local templatefilename = file.join(stylepath,templatefilebase)
+
+ -- we keep track of all used files
+
local files = {
}
- local x_styles, h_styles = allusedstylesheets(xmlfile,cssfiles,files)
+
+ -- we always load the defaults and optionally extra css files; we also copy the example
+ -- css file so that we always have the latest version
+
+ local cssfiles = {
+ defaultfilebase,
+ imagefilebase,
+ stylefilebase,
+ }
+
+ local examplefilename = resolvers.find_file("export-example.css")
+ if examplefilename then
+ file.copy(examplefilename,defaultfilename)
+ end
+
+ if type(cssfile) == "string" then
+ local list = table.unique(settings_to_array(cssfile))
+ for i=1,#list do
+ local source = file.addsuffix(list[i],"css")
+ local target = source
+ cssfiles[#cssfiles+1] = target
+ -- todo: warning if no file yet
+ end
+ end
+
+ local x_styles, h_styles = allusedstylesheets(cssfiles,files,"styles")
+
+ -- at this point we're ready for the content; the collector also does some
+ -- housekeeping and data collecting; the result is still an xml
+ -- representation that uses verbose element names and carries information in
+ -- attributes
+
+ local result = allcontent(tree)
+
local results = concat {
wholepreamble(true),
x_styles, -- adds to files
result,
}
- --
- files = table.unique(files)
- --
- report_export("saving xml data in %a",xmlfile)
- io.savedata(xmlfile,results)
- --
+
+ cssfiles = table.unique(cssfiles)
+
+ -- we're now ready for saving the result in the xml file
+
+ report_export("saving xml data in %a",xmlfilename)
+ io.savedata(xmlfilename,results)
+
report_export("saving css image definitions in %a",imagefilename)
- io.savedata(imagefilename,wrapups.allusedimages(xmlfile))
- --
+ io.savedata(imagefilename,wrapups.allusedimages(basename))
+
report_export("saving css style definitions in %a",stylefilename)
- io.savedata(stylefilename,wrapups.allusedstyles(xmlfile))
- --
+ io.savedata(stylefilename,wrapups.allusedstyles(basename))
+
report_export("saving css template in %a",templatefilename)
- io.savedata(templatefilename,allusedelements(xmlfile))
- --
- local xmltree = nil
- if xhtmlfile then
- -- basic
- if type(v) ~= "string" or xhtmlfile == true or xhtmlfile == v_yes or xhtmlfile == "" or xhtmlfile == xmlfile then
- xhtmlfile = file.replacesuffix(xmlfile,"xhtml")
- else
- xhtmlfile = file.addsuffix(xhtmlfile,"xhtml")
- end
- files[#files+1] = xhtmlfile
- report_export("saving xhtml variant in %a",xhtmlfile)
- xmltree = cleanxhtmltree(xml.convert(results))
- xml.save(xmltree,xhtmlfile)
- -- looking at identity is somewhat redundant as we also inherit from interaction
- -- at the tex end
- local identity = interactions.general.getidentity()
- local specification = {
- name = file.removesuffix(v),
- identifier = os.uuid(),
- images = wrapups.uniqueusedimages(),
- imagefile = imagefilename,
- stylefile = stylefilename,
- root = xhtmlfile,
- files = files,
- language = languagenames[texgetcount("mainlanguagenumber")],
- title = validstring(finetuning.title) or validstring(identity.title),
- subtitle = validstring(finetuning.subtitle) or validstring(identity.subtitle),
- author = validstring(finetuning.author) or validstring(identity.author),
- firstpage = validstring(finetuning.firstpage),
- lastpage = validstring(finetuning.lastpage),
- }
- report_export("saving specification in %a",specificationfilename,specificationfilename)
- io.savedata(specificationfilename,table.serialize(specification,true))
- -- bonus
- local resultfile = file.replacesuffix(xmlfile,"html")
- report_export("saving div based alternative in %a",resultfile)
- remap(specification,xmltree)
- local variables = {
- style = h_styles,
- body = xml.tostring(xml.first(xmltree,"/div")),
- preamble = wholepreamble(false),
- title = specification.title,
- }
- io.savedata(resultfile,replacetemplate(htmltemplate,variables,"xml"))
- report_export("")
- report_export([[create epub with: mtxrun --script epub --make "%s"]],file.nameonly(resultfile))
- report_export("")
- end
+ io.savedata(templatefilename,allusedelements(basename))
+
+ -- additionally we save an xhtml file; for that we convert the result to an xml tree
+
+ report_export("saving xhtml variant in %a",xhtmlfilename)
+
+ local xmltree = cleanxhtmltree(xml.convert(results))
+
+ xml.save(xmltree,xhtmlfilename)
+
+ -- now we save a specification file that can be used for generating an epub file
+
+ -- looking at identity is somewhat redundant as we also inherit from interaction
+ -- at the tex end
+
+ local identity = interactions.general.getidentity()
+
+ local specification = {
+ name = file.removesuffix(v),
+ identifier = os.uuid(),
+ images = wrapups.uniqueusedimages(),
+ imagefile = file.join("styles",imagefilebase),
+ imagepath = "images",
+ stylepath = "styles",
+ xmlfiles = { xmlfilebase },
+ xhtmlfiles = { xhtmlfilebase },
+ htmlfiles = { htmlfilebase },
+ styles = cssfiles,
+ htmlroot = htmlfilebase,
+ language = languagenames[texgetcount("mainlanguagenumber")],
+ title = validstring(finetuning.title) or validstring(identity.title),
+ subtitle = validstring(finetuning.subtitle) or validstring(identity.subtitle),
+ author = validstring(finetuning.author) or validstring(identity.author),
+ firstpage = validstring(finetuning.firstpage),
+ lastpage = validstring(finetuning.lastpage),
+ }
+
+ report_export("saving specification in %a",specificationfilename,specificationfilename)
+
+ io.savedata(specificationfilename,table.serialize(specification,true))
+
+ -- the html export for epub is different in the sense that it uses div's instead of
+ -- specific tags
+
+ report_export("saving div based alternative in %a",htmlfilename)
+
+ remap(specification,xmltree)
+
+ local variables = {
+ style = h_styles,
+ body = xml.tostring(xml.first(xmltree,"/div")),
+ preamble = wholepreamble(false),
+ title = specification.title,
+ }
+
+ io.savedata(htmlfilename,replacetemplate(htmltemplate,variables,"xml"))
+
+ -- finally we report how an epub file can be made (using the specification)
+
+ report_export("")
+ report_export('create epub with: mtxrun --script epub --make "%s" [--purge --rename --svgmath]',file.nameonly(basename))
+ report_export("")
+
stoptiming(treehash)
end
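The change above moves all export output to a dedicated jobname-export subtree and creates the images and styles directories on the fly. A minimal standalone sketch of that mkdir-or-reuse pattern, using only the lfs calls that appear in the diff (the job name and directory names below are hypothetical):

local lfs = require("lfs")

-- reuse the directory when it already exists, otherwise try to create it;
-- lfs.mkdir only creates one level, so the export root must come first
local function ensuredir(pathname)
    if lfs.isdir(pathname) then
        return pathname
    end
    lfs.mkdir(pathname)
    return lfs.isdir(pathname) and pathname or nil
end

local base = "myjob-export" -- hypothetical job name
for _, path in ipairs { base, base .. "/images", base .. "/styles" } do
    print(path, ensuredir(path) and "ok" or "failed")
end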
diff --git a/tex/context/base/back-exp.mkiv b/tex/context/base/back-exp.mkiv
index bda056fac..7a9824555 100644
--- a/tex/context/base/back-exp.mkiv
+++ b/tex/context/base/back-exp.mkiv
@@ -164,10 +164,11 @@
% \c!lastpage=, % imagename
\c!alternative=, % html, div
\c!properties=\v!no, % no: ignore, yes: as attribute, otherwise: use as prefix
- \c!hyphen=\v!no]
+ \c!hyphen=\v!no,
+ \c!svgstyle=]
\setupbackend
- [css=export-example.css]
+ [css=] % ?
\def\dosynchronizeexport
{\let\currentexport\empty
@@ -182,6 +183,7 @@
author = \!!bs\exportparameter\c!author\!!es,
firstpage = "\exportparameter\c!firstpage",
lastpage = "\exportparameter\c!lastpage",
+ svgstyle = "\exportparameter\c!svgstyle",
}}}
\appendtoks
diff --git a/tex/context/base/char-ini.mkiv b/tex/context/base/char-ini.mkiv
index 4fb63d93e..e130e200f 100644
--- a/tex/context/base/char-ini.mkiv
+++ b/tex/context/base/char-ini.mkiv
@@ -16,6 +16,7 @@
\registerctxluafile{char-fio}{1.001}
\registerctxluafile{char-map}{1.001} % maybe we will load this someplace else
\registerctxluafile{char-tex}{1.001}
+\registerctxluafile{char-ent}{1.001}
\unprotect
diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv
index 37a9ead0f..58f53214e 100644
--- a/tex/context/base/cont-new.mkiv
+++ b/tex/context/base/cont-new.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2014.09.18 11:17}
+\newcontextversion{2014.09.26 11:42}
%D This file is loaded at runtime, thereby providing an excellent place for
%D hacks, patches, extensions and new features.
diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf
index e0c719446..71b88d27b 100644
--- a/tex/context/base/context-version.pdf
+++ b/tex/context/base/context-version.pdf
Binary files differ
diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv
index f060a710b..509472134 100644
--- a/tex/context/base/context.mkiv
+++ b/tex/context/base/context.mkiv
@@ -28,7 +28,7 @@
%D up and the dependencies are more consistent.
\edef\contextformat {\jobname}
-\edef\contextversion{2014.09.18 11:17}
+\edef\contextversion{2014.09.26 11:42}
\edef\contextkind {beta}
%D For those who want to use this:
@@ -184,6 +184,7 @@
\loadmarkfile{hand-ini}
\loadmarkfile{lang-ini}
+\loadmarkfile{lang-hyp}
\loadmarkfile{lang-lab}
\loadmarkfile{unic-ini}
diff --git a/tex/context/base/data-res.lua b/tex/context/base/data-res.lua
index 3dd16c23b..13d7627d2 100644
--- a/tex/context/base/data-res.lua
+++ b/tex/context/base/data-res.lua
@@ -1459,6 +1459,9 @@ end
-- -- -- end of main file search routing -- -- --
local function findfiles(filename,filetype,allresults)
+ if not filename or filename == "" then
+ return { }
+ end
local result, status = collect_instance_files(filename,filetype or "",allresults)
if not result or #result == 0 then
local lowered = lower(filename)
diff --git a/tex/context/base/data-sch.lua b/tex/context/base/data-sch.lua
index adc774489..1e1077b03 100644
--- a/tex/context/base/data-sch.lua
+++ b/tex/context/base/data-sch.lua
@@ -31,8 +31,18 @@ function cleaners.none(specification)
return specification.original
end
-function cleaners.strip(specification)
- return (gsub(specification.original,"[^%a%d%.]+","-")) -- so we keep periods
+-- function cleaners.strip(specification)
+-- -- todo: only keep suffix periods, so after the last
+-- return (gsub(specification.original,"[^%a%d%.]+","-")) -- so we keep periods
+-- end
+
+function cleaners.strip(specification) -- keep suffixes
+ local path, name = file.splitbase(specification.original)
+ if path == "" then
+ return (gsub(name,"[^%a%d%.]+","-"))
+ else
+ return (gsub((gsub(path,"%.","-") .. "-" .. name),"[^%a%d%.]+","-"))
+ end
end
function cleaners.md5(specification)
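The new cleaners.strip keeps periods only in the file name part, so cached remote resources keep their suffix while periods in the path become dashes. A standalone approximation (file.splitbase is assumed to split off the last path segment; it is mimicked here with string.match, and the url is only an example):

local gsub, match = string.gsub, string.match

local function splitbase(str) -- rough stand-in for file.splitbase
    local path, name = match(str,"^(.+)/([^/]+)$")
    if path then
        return path, name
    end
    return "", str
end

local function strip(original) -- mirrors the new cleaners.strip
    local path, name = splitbase(original)
    if path == "" then
        return (gsub(name,"[^%a%d%.]+","-"))
    end
    return (gsub((gsub(path,"%.","-") .. "-" .. name),"[^%a%d%.]+","-"))
end

print(strip("http://www.pragma-ade.com/show-fon.pdf"))
-- old cleaner: http-www.pragma-ade.com-show-fon.pdf (all periods kept)
-- new cleaner: http-www-pragma-ade-com-show-fon.pdf (only the suffix period kept)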
diff --git a/tex/context/base/export-example.css b/tex/context/base/export-example.css
index f78014a5d..2962a790f 100644
--- a/tex/context/base/export-example.css
+++ b/tex/context/base/export-example.css
@@ -687,7 +687,19 @@ link, div.link {
/* margintextblock : inline */
/* margintext : inline */
-/* math : inline */
+margintext, div.margintext {
+ display : block ;
+ font-weight : bold ;
+ margin-top : 1em ;
+ margin-bottom : 1em ;
+}
+
+margintext:before, div.margintext:before {
+ content : "\25B6\00A0\00A0" ;
+ color : rgb(40%,40%,40%) ;
+}
+
+/* math : inline | display */
/* mn : mixed */
/* mi : mixed */
/* mo : mixed */
@@ -710,6 +722,16 @@ link, div.link {
/* mtr : display */
/* mtd : display */
+div.math-inline {
+ display : inline ;
+ vertical-align : 0 ; /* this will be set directly */
+}
+
+div.math-display {
+ display : block ;
+ margin : 1ex 0ex 1em 3em ;
+}
+
/* quantity : inline */
/* unit : inline */
/* number : inline */
diff --git a/tex/context/base/font-ctx.lua b/tex/context/base/font-ctx.lua
index e366f746d..5920501dd 100644
--- a/tex/context/base/font-ctx.lua
+++ b/tex/context/base/font-ctx.lua
@@ -1467,9 +1467,109 @@ local function indextoslot(index)
end
end
+do -- else too many locals
-helpers.nametoslot = nametoslot
-helpers.indextoslot = indextoslot
+ local entities = characters.entities
+ local lowered = { } -- delayed initialization
+
+ table.setmetatableindex(lowered,function(t,k)
+ for k, v in next, entities do
+ local l = lower(k)
+ if not entities[l] then
+ lowered[l] = v
+ end
+ end
+ table.setmetatableindex(lowered,nil)
+ return lowered[k]
+ end)
+
+ local methods = {
+ -- entity
+ e = function(name)
+ return entities[name] or lowered[name] or name
+ end,
+ -- hexadecimal unicode
+ x = function(name)
+ local n = tonumber(name,16)
+ return n and utfchar(n) or name
+ end,
+ -- decimal unicode
+ d = function(name)
+ local n = tonumber(name)
+ return n and utfchar(n) or name
+ end,
+ -- hexadecimal index (slot)
+ s = function(name)
+ local n = tonumber(name,16)
+ local n = n and indextoslot(n)
+ return n and utfchar(n) or name
+ end,
+ -- decimal index
+ i = function(name)
+ local n = tonumber(name)
+ local n = n and indextoslot(n)
+ return n and utfchar(n) or name
+ end,
+ -- name
+ n = function(name)
+ local n = nametoslot(name)
+ return n and utfchar(n) or name
+ end,
+ -- char
+ c = function(name)
+ return name
+ end,
+ }
+
+ -- -- nicer:
+ --
+ -- table.setmetatableindex(methods,function(t,k) return methods.c end)
+ --
+ -- local splitter = (C(1) * P(":") + Cc("c")) * C(P(1)^1) / function(method,name)
+ -- return methods[method](name)
+ -- end
+ --
+ -- -- more efficient:
+
+ local splitter = C(1) * P(":") * C(P(1)^1) / function(method,name)
+ local action = methods[method]
+ return action and action(name) or name
+ end
+
+ local function tochar(str)
+ local t = type(str)
+ if t == "number" then
+ return utfchar(str)
+ elseif t == "string" then
+ return lpegmatch(splitter,str) or str
+ end
+ end
+
+ helpers.nametoslot = nametoslot
+ helpers.indextoslot = indextoslot
+ helpers.tochar = tochar
+
+ -- interfaces:
+
+ function commands.fontchar(n)
+ n = nametoslot(n)
+ if n then
+ context_char(n)
+ end
+ end
+
+ function commands.fontcharbyindex(n)
+ n = indextoslot(n)
+ if n then
+ context_char(n)
+ end
+ end
+
+ function commands.tochar(str)
+ context(tochar(str))
+ end
+
+end
-- this will change ...
@@ -1617,20 +1717,6 @@ local context_getvalue = context.getvalue
local commands_doifelse = commands.doifelse
-function commands.fontchar(n)
- n = nametoslot(n)
- if n then
- context_char(n)
- end
-end
-
-function commands.fontcharbyindex(n)
- n = indextoslot(n)
- if n then
- context_char(n)
- end
-end
-
function commands.doifelsecurrentfonthasfeature(name) -- can be made faster with a supportedfeatures hash
local f = fontdata[currentfont()]
f = f and f.shared
diff --git a/tex/context/base/font-ini.mkvi b/tex/context/base/font-ini.mkvi
index 556816f6d..c427c2f89 100644
--- a/tex/context/base/font-ini.mkvi
+++ b/tex/context/base/font-ini.mkvi
@@ -2175,6 +2175,23 @@
\unexpanded\def\fontcharbyindex#index% unofficial command, for idris' font building
{\ctxcommand{fontcharbyindex(\number#index)}}
+%D The \type {\tochar} command takes a specification:
+%D
+%D \starttabulate[|l|l|l|]
+%D \NC e \NC entity \NC e:eacute \NC \NR
+%D \NC x \NC hexadecimal unicode \NC x:013D \NC \NR
+%D \NC d \NC decimal unicode \NC d:123 \NC \NR
+%D \NC s \NC hexadecimal index (slot) \NC s:210D \NC \NR
+%D \NC i \NC decimal index \NC i:456 \NC \NR
+%D \NC n \NC name \NC n:eight \NC \NR
+%D \NC c \NC character \NC c:x \NC \NR
+%D \stoptabulate
+%D
+%D This is an expandable command!
+
+\def\tochar#specifications%
+ {\ctxcommand{tochar("#specifications")}} % expanded (also used in edef)
+
%D The next auxiliary macro is an alternative to \type
%D {\fontname}.
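The \tochar interface dispatches on the single letter before the colon, as implemented in the font-ctx.lua hunk above. A reduced plain-Lua sketch of that dispatch, limited to the d: and x: methods so it runs outside ConTeXt (utf8.char from Lua 5.3 stands in for utfchar; the real code uses an lpeg splitter and also handles e:, s:, i:, n: and c:):

local methods = {
    -- hexadecimal unicode
    x = function(name) local n = tonumber(name,16) return n and utf8.char(n) or name end,
    -- decimal unicode
    d = function(name) local n = tonumber(name)    return n and utf8.char(n) or name end,
}

local function tochar(str)
    local method, name = str:match("^(%a):(.+)$")
    local action = method and methods[method]
    return action and action(name) or str
end

print(tochar("x:013D")) -- Ľ (U+013D)
print(tochar("d:123"))  -- { (U+007B)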
diff --git a/tex/context/base/font-mis.lua b/tex/context/base/font-mis.lua
index ea7fe803a..1b50977ea 100644
--- a/tex/context/base/font-mis.lua
+++ b/tex/context/base/font-mis.lua
@@ -22,7 +22,7 @@ local handlers = fonts.handlers
handlers.otf = handlers.otf or { }
local otf = handlers.otf
-otf.version = otf.version or 2.759
+otf.version = otf.version or 2.760
otf.cache = otf.cache or containers.define("fonts", "otf", otf.version, true)
function otf.loadcached(filename,format,sub)
diff --git a/tex/context/base/font-otf.lua b/tex/context/base/font-otf.lua
index 688989596..c1b23983d 100644
--- a/tex/context/base/font-otf.lua
+++ b/tex/context/base/font-otf.lua
@@ -48,7 +48,7 @@ local otf = fonts.handlers.otf
otf.glists = { "gsub", "gpos" }
-otf.version = 2.759 -- beware: also sync font-mis.lua
+otf.version = 2.760 -- beware: also sync font-mis.lua
otf.cache = containers.define("fonts", "otf", otf.version, true)
local fontdata = fonts.hashes.identifiers
@@ -203,7 +203,6 @@ local valid_fields = table.tohash {
"extrema_bound",
"familyname",
"fontname",
- "fontname",
"fontstyle_id",
"fontstyle_name",
"fullname",
@@ -480,6 +479,8 @@ function otf.load(filename,sub,featurefile) -- second argument (format) is gone
lookuptypes = {
},
},
+ warnings = {
+ },
metadata = {
-- raw metadata, not to be used
},
@@ -1789,6 +1790,12 @@ end
-- future versions will remove _
+local valid = (lpeg.R("\x00\x7E") - lpeg.S("(){}[]<>%/ \n\r\f\v"))^0 * lpeg.P(-1)
+
+local function valid_ps_name(str)
+ return str and str ~= "" and #str < 64 and lpegmatch(valid,str) and true or false
+end
+
actions["check metadata"] = function(data,filename,raw)
local metadata = data.metadata
for _, k in next, mainfields do
@@ -1808,9 +1815,38 @@ actions["check metadata"] = function(data,filename,raw)
end
--
if metadata.validation_state and table.contains(metadata.validation_state,"bad_ps_fontname") then
- local name = file.nameonly(filename)
- metadata.fontname = "bad-fontname-" .. name
- metadata.fullname = "bad-fullname-" .. name
+ -- the ff library does a bit too much (and wrong) checking ... so we need to catch this
+ -- at least for now
+ local function valid(what)
+ local names = raw.names
+ for i=1,#names do
+ local list = names[i]
+ local names = list.names
+ if names then
+ local name = names[what]
+ if name and valid_ps_name(name) then
+ return name
+ end
+ end
+ end
+ end
+ local function check(what)
+ local oldname = metadata[what]
+ if valid_ps_name(oldname) then
+ report_otf("ignoring warning %a because %s %a is proper ASCII","bad_ps_fontname",what,oldname)
+ else
+ local newname = valid(what)
+ if not newname then
+ newname = formatters["bad-%s-%s"](what,file.nameonly(filename))
+ end
+ local warning = formatters["overloading %s from invalid ASCII name %a to %a"](what,oldname,newname)
+ data.warnings[#data.warnings+1] = warning
+ report_otf(warning)
+ metadata[what] = newname
+ end
+ end
+ check("fontname")
+ check("fullname")
end
--
end
@@ -1964,6 +2000,7 @@ end
local function copytotfm(data,cache_id)
if data then
local metadata = data.metadata
+ local warnings = data.warnings
local resources = data.resources
local properties = derivetable(data.properties)
local descriptions = derivetable(data.descriptions)
@@ -2058,6 +2095,7 @@ local function copytotfm(data,cache_id)
local filename = constructors.checkedfilename(resources)
local fontname = metadata.fontname
local fullname = metadata.fullname or fontname
+ local psname = fontname or fullname
local units = metadata.units_per_em or 1000
--
if units == 0 then -- catch bugs in fonts
@@ -2151,11 +2189,21 @@ local function copytotfm(data,cache_id)
properties.filename = filename
properties.fontname = fontname
properties.fullname = fullname
- properties.psname = fontname or fullname
+ properties.psname = psname
properties.name = filename or fullname
--
-- properties.name = specification.name
-- properties.sub = specification.sub
+ --
+ if warnings and #warnings > 0 then
+ report_otf("warnings for font: %s",filename)
+ report_otf()
+ for i=1,#warnings do
+ report_otf(" %s",warnings[i])
+ end
+ report_otf()
+ end
+ --
return {
characters = characters,
descriptions = descriptions,
@@ -2164,6 +2212,7 @@ local function copytotfm(data,cache_id)
resources = resources,
properties = properties,
goodies = goodies,
+ warnings = warnings,
}
end
end
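The bad_ps_fontname handling above hinges on valid_ps_name: a PostScript name is accepted when it is non-empty, shorter than 64 bytes, and contains only printable ASCII without the usual PostScript delimiters. The check can be exercised standalone with lpeg in Lua 5.2 or later (the \x escapes need that); the font names below are made up for illustration:

local lpeg = require("lpeg")
local lpegmatch = lpeg.match

-- same pattern as in the diff: bytes 0x00-0x7E minus delimiters and whitespace
local valid = (lpeg.R("\x00\x7E") - lpeg.S("(){}[]<>%/ \n\r\f\v"))^0 * lpeg.P(-1)

local function valid_ps_name(str)
    return str and str ~= "" and #str < 64 and lpegmatch(valid,str) and true or false
end

print(valid_ps_name("SomeFont-BoldItalic")) -- true : plain ASCII, no delimiters
print(valid_ps_name("Some Font (Bold)"))    -- false: space and parentheses
print(valid_ps_name("Fönt-Regular"))        -- false: non-ASCII bytes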
diff --git a/tex/context/base/grph-inc.lua b/tex/context/base/grph-inc.lua
index d3b13a680..f83c759b3 100644
--- a/tex/context/base/grph-inc.lua
+++ b/tex/context/base/grph-inc.lua
@@ -1362,6 +1362,11 @@ end
local function runprogram(binary,argument,variables)
-- os.which remembers found programs
+-- if not variables and type(binary) == "table" and binary.command then
+-- variables = argument
+-- argument = binary.argument
+-- binary = binary.command
+-- end
local found = nil
if type(binary) == "table" then
for i=1,#binary do
@@ -1403,6 +1408,8 @@ local epsconverter = converters.eps or { }
converters.eps = epsconverter
converters.ps = epsconverter
+-- todo: colorspace
+
local epstopdf = {
resolutions = {
[v_low] = "screen",
@@ -1483,22 +1490,22 @@ epsconverter.default = epsconverter.pdf
local pdfconverter = converters.pdf or { }
converters.pdf = pdfconverter
-programs.pdftoeps = {
- command = "pdftops",
- argument = [[-eps "%oldname%" "%newname%]],
-}
-
-pdfconverter.stripped = function(oldname,newname)
- local pdftoeps = programs.pdftoeps -- can be changed
- local epstopdf = programs.epstopdf -- can be changed
- local presets = epstopdf.resolutions[resolution or ""] or epstopdf.resolutions.high
- local tmpname = newname .. ".tmp"
- runprogram(pdftoeps.command, pdftoeps.argument, { oldname = oldname, newname = tmpname, presets = presets })
- runprogram(epstopdf.command, epstopdf.argument, { oldname = tmpname, newname = newname, presets = presets })
- os.remove(tmpname)
-end
-
-figures.registersuffix("stripped","pdf")
+-- programs.pdftoeps = {
+-- command = "pdftops",
+-- argument = [[-eps "%oldname%" "%newname%"]],
+-- }
+--
+-- pdfconverter.stripped = function(oldname,newname)
+-- local pdftoeps = programs.pdftoeps -- can be changed
+-- local epstopdf = programs.epstopdf -- can be changed
+-- local presets = epstopdf.resolutions[resolution or ""] or epstopdf.resolutions.high
+-- local tmpname = newname .. ".tmp"
+-- runprogram(pdftoeps.command, pdftoeps.argument, { oldname = oldname, newname = tmpname, presets = presets })
+-- runprogram(epstopdf.command, epstopdf.argument, { oldname = tmpname, newname = newname, presets = presets })
+-- os.remove(tmpname)
+-- end
+--
+-- figures.registersuffix("stripped","pdf")
-- -- -- svg -- -- --
@@ -1575,6 +1582,70 @@ bmpconverter.default = converter
-- todo: lowres
+-- cmyk conversion
+
+local rgbprofile = "srgb.icc"
+local cmykprofile = "isocoated_v2_eci.icc"
+
+directives.register("graphics.conversion.rgbprofile", function(v) rgbprofile = type(v) == "string" and v or rgbprofile end)
+directives.register("graphics.conversion.cmykprofile",function(v) cmykprofile = type(v) == "string" and v or cmykprofile end)
+
+local function profiles()
+ if not lfs.isfile(rgbprofile) then
+ local found = resolvers.findfile(rgbprofile)
+ if found and found ~= "" then
+ rgbprofile = found
+ else
+ report_figures("unknown profile %a",rgbprofile)
+ end
+ end
+ if not lfs.isfile(cmykprofile) then
+ local found = resolvers.findfile(cmykprofile)
+ if found and found ~= "" then
+ cmykprofile = found
+ else
+ report_figures("unknown profile %a",cmykprofile)
+ end
+ end
+ return rgbprofile, cmykprofile
+end
+
+programs.pngtocmykpdf = {
+ command = "gm",
+ argument = [[convert -strip +profile "*" -profile "%rgbprofile%" -profile "%cmykprofile%" -colorspace cmyk -strip -sampling-factor 1x1 "%oldname%" "%newname%"]],
+}
+
+programs.jpgtocmykpdf = {
+ command = "gm",
+ argument = [[convert -strip +profile "*" -profile "%rgbprofile%" -profile "%cmykprofile%" -colorspace cmyk -strip -sampling-factor 1x1 -compress JPEG "%oldname%" "%newname%"]],
+}
+
+figures.converters.png = {
+ ["cmyk.pdf"] = function(oldname,newname,resolution)
+ local rgbprofile, cmykprofile = profiles()
+ runprogram(programs.pngtocmykpdf.command, programs.pngtocmykpdf.argument, {
+-- new: runprogram(programs.pngtocmykpdf, {
+ rgbprofile = rgbprofile,
+ cmykprofile = cmykprofile,
+ oldname = oldname,
+ newname = newname,
+ } )
+ end,
+}
+
+figures.converters.jpg = {
+ ["cmyk.pdf"] = function(oldname,newname,resolution)
+ local rgbprofile, cmykprofile = profiles()
+ runprogram(programs.jpgtocmykpdf.command, programs.jpgtocmykpdf.argument, {
+-- new: runprogram(programs.jpgtocmykpdf, {
+ rgbprofile = rgbprofile,
+ cmykprofile = cmykprofile,
+ oldname = oldname,
+ newname = newname,
+ } )
+ end,
+}
+
-- -- -- bases -- -- --
local bases = allocate()
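The new cmyk converters pass their command line as a template in which %key% placeholders are filled from the variables table handed to runprogram. A minimal approximation of that substitution step (the real runprogram also locates the gm binary and reports what it runs; the file names below are hypothetical and the argument is a condensed form of the pngtocmykpdf one):

local function expand(argument,variables)
    -- replace every %key% marker with the corresponding entry from the table
    return (argument:gsub("%%(%a+)%%",function(key)
        return variables[key] or ""
    end))
end

local argument = [[convert -profile "%rgbprofile%" -profile "%cmykprofile%" -colorspace cmyk "%oldname%" "%newname%"]]

print("gm " .. expand(argument, {
    rgbprofile  = "srgb.icc",                -- defaults from the diff
    cmykprofile = "isocoated_v2_eci.icc",
    oldname     = "figure.png",              -- hypothetical input
    newname     = "figure.cmyk.pdf",         -- hypothetical output
}))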
diff --git a/tex/context/base/lang-hyp.lua b/tex/context/base/lang-hyp.lua
new file mode 100644
index 000000000..3b5eac9ba
--- /dev/null
+++ b/tex/context/base/lang-hyp.lua
@@ -0,0 +1,663 @@
+if not modules then modules = { } end modules ['lang-hyp'] = {
+ version = 1.001,
+ comment = "companion to lang-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- In an automated workflow hyphenation of long titles can be somewhat problematic
+-- especially when demands conflict. For that reason I played a bit with a Lua based
+-- variant of the traditional hyphenation machinery. This mechanism has been extended
+-- several times in projects, of which a good description can be found in TUGboat,
+-- Volume 27 (2006), No. 2 — Proceedings of EuroTEX2006: Automatic non-standard
+-- hyphenation in OpenOffice.org by László Németh.
+--
+-- Being the result of two days of experimenting, the following implementation is probably
+-- not completely okay yet. If there is demand I might add some more features and plugs.
+-- The performance is quite okay but can probably be improved a bit, although this is not
+-- the most critical code.
+--
+-- . a l g o r i t h m .
+-- 4l1g4
+-- l g o3
+-- 1g o
+-- 2i t h
+-- 4h1m
+-- ---------------------
+-- 4 1 4 3 2 0 4 1
+-- a l-g o-r i t h-m
+
+-- . a s s z o n n y a l .
+-- s1s z/sz=sz,1,3
+-- n1n y/ny=ny,1,3
+-- -----------------------
+-- 0 1 0 0 0 1 0 0 0/sz=sz,2,3,ny=ny,6,3
+-- a s-s z o n-n y a l/sz=sz,2,3,ny=ny,6,3
+--
+-- ab1cd/ef=gh,2,2 : acd - efd (pattern/replacement,start,length)
+
+local type, rawset, tonumber = type, rawset, tonumber
+
+local P, R, S, Cg, Cf, Ct, Cc, C, Carg, Cs = lpeg.P, lpeg.R, lpeg.S, lpeg.Cg, lpeg.Cf, lpeg.Ct, lpeg.Cc, lpeg.C, lpeg.Carg, lpeg.Cs
+local lpegmatch = lpeg.match
+
+local concat = table.concat
+
+local utfchar = utf.char
+local utfbyte = utf.byte
+
+if not characters then
+ require("char-ini")
+end
+
+local setmetatableindex = table.setmetatableindex
+
+local languages = languages or { }
+local hyphenators = languages.hyphenators or { }
+languages.hyphenators = hyphenators
+local traditional = hyphenators.traditional or { }
+hyphenators.traditional = traditional
+
+local dictionaries = setmetatableindex(function(t,k)
+ local v = {
+ patterns = { },
+ hyphenated = { },
+ specials = { },
+ }
+ t[k] = v
+ return v
+end)
+
+local digit = R("09")
+local character = lpeg.patterns.utf8character - P("/")
+local splitpattern_k = Cs((digit/"" + character)^1)
+local splitpattern_v = Ct(((digit/tonumber + Cc(0)) * character)^1 * (digit/tonumber)^0)
+local splitpattern_v =
+ Ct(((digit/tonumber + Cc(0)) * character)^1 * (digit/tonumber)^0) *
+ (P("/") * Cf ( Ct("") *
+ Cg ( Cc("before") * C((1-lpeg.P("="))^1) * P("=") )
+ * Cg ( Cc("after") * C((1-lpeg.P(","))^1) * P(",") )
+ * Cg ( Cc("start") * ((1-lpeg.P(","))^1/tonumber) * P(",") )
+ * Cg ( Cc("length") * ((1-lpeg.P(-1) )^1/tonumber) )
+ , rawset))^-1
+
+local function register(patterns,specials,str,specification)
+ local k = lpegmatch(splitpattern_k,str)
+ local v1, v2 = lpegmatch(splitpattern_v,str)
+ patterns[k] = v1
+ if specification then
+ specials[k] = specification
+ elseif v2 then
+ specials[k] = v2
+ end
+end
+
+local word = ((Carg(1) * Carg(2) * C((1 - P(" "))^1)) / register + 1)^1
+local split = Ct(C(character)^1)
+
+function traditional.loadpatterns(language,filename)
+ local specification = require(filename)
+ local dictionary = dictionaries[language]
+ if specification then
+ local patterns = specification.patterns
+ if patterns then
+ lpegmatch(word,patterns.data,1,dictionary.patterns,dictionary.specials)
+ end
+ end
+ return dictionary
+end
+
+local lcchars = characters.lcchars
+local uccodes = characters.uccodes
+local nofwords = 0
+local nofhashed = 0
+
+local function hyphenate(dictionary,word)
+ nofwords = nofwords + 1
+ local hyphenated = dictionary.hyphenated
+ local isstring = type(word) == "string"
+ local done
+ if isstring then
+ done = hyphenated[word]
+ else
+ done = hyphenated[concat(word)]
+ end
+ if done ~= nil then
+ return done
+ else
+ done = false
+ end
+ local specials = dictionary.specials
+ local patterns = dictionary.patterns
+ local s = isstring and lpegmatch(split,word) or word
+ local l = #s
+ local w = { }
+ for i=1,l do
+ local si = s[i]
+ w[i] = lcchars[si] or si
+ end
+ local spec
+ for i=1,l do
+ for j=i,l do
+ local c = concat(w,"",i,j)
+ local m = patterns[c]
+ if m then
+ local s = specials[c]
+ if not done then
+ done = { }
+ spec = { }
+ for i=1,l do
+ done[i] = 0
+ end
+ end
+ for k=1,#m do
+ local new = m[k]
+ if not new then
+ break
+ elseif new > 0 then
+ local pos = i + k - 1
+ local old = done[pos]
+ if not old then
+ -- break ?
+ elseif new > old then
+ done[pos] = new
+ if s then
+ local b = i + s.start - 1
+ local e = b + s.length - 1
+ if pos >= b and pos <= e then
+ spec[pos] = s
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ if done then
+ local okay = false
+ for i=1,#done do
+ if done[i] % 2 == 1 then
+ done[i] = spec[i] or true
+ okay = true
+ else
+ done[i] = false
+ end
+ end
+ if not okay then
+ done = false
+ end
+ end
+ hyphenated[isstring and word or concat(word)] = done
+ nofhashed = nofhashed + 1
+ return done
+end
+
+local f_detail_1 = string.formatters["{%s}{%s}{}"]
+local f_detail_2 = string.formatters["{%s%s}{%s%s}{%s}"]
+
+function traditional.injecthyphens(dictionary,word,specification)
+ local h = hyphenate(dictionary,word)
+ if not h then
+ return word
+ end
+ local w = lpegmatch(split,word)
+ local r = { }
+ local l = #h
+ local n = 0
+ local i = 1
+ local leftmin = specification.lefthyphenmin or 2
+ local rightmin = l - (specification.righthyphenmin or 2) + 1
+ local leftchar = specification.lefthyphenchar
+ local rightchar = specification.righthyphenchar
+ while i <= l do
+ if i > leftmin and i < rightmin then
+ local hi = h[i]
+ if not hi then
+ n = n + 1
+ r[n] = w[i]
+ i = i + 1
+ elseif hi == true then
+ n = n + 1
+ r[n] = f_detail_1(rightchar,leftchar)
+ n = n + 1
+ r[n] = w[i]
+ i = i + 1
+ else
+ local b = i - hi.start
+ local e = b + hi.length - 1
+ n = b
+ r[n] = f_detail_2(hi.before,rightchar,leftchar,hi.after,concat(w,"",b,e))
+ if e + 1 == i then
+ i = i + 1
+ else
+ i = e + 1
+ end
+ end
+ else
+ n = n + 1
+ r[n] = w[i]
+ i = i + 1
+ end
+ end
+ return concat(r)
+end
+
+function traditional.registerpattern(language,str,specification)
+ local dictionary = dictionaries[language]
+ register(dictionary.patterns,dictionary.specials,str,specification)
+end
+
+-- todo: unicodes or utfhash ?
+
+if context then
+
+ local nodecodes = nodes.nodecodes
+ local glyph_code = nodecodes.glyph
+ local math_code = nodecodes.math
+
+ local nuts = nodes.nuts
+ local tonut = nodes.tonut
+ local nodepool = nuts.pool
+
+ local new_disc = nodepool.disc
+
+ local setfield = nuts.setfield
+ local getfield = nuts.getfield
+ local getchar = nuts.getchar
+ local getid = nuts.getid
+ local getnext = nuts.getnext
+ local getprev = nuts.getprev
+ local insert_before = nuts.insert_before
+ local insert_after = nuts.insert_after
+ local copy_node = nuts.copy
+ local remove_node = nuts.remove
+ local end_of_math = nuts.end_of_math
+ local node_tail = nuts.tail
+
+ function traditional.loadpatterns(language)
+ return dictionaries[language]
+ end
+
+ statistics.register("hyphenation",function()
+ if nofwords > 0 then
+ return string.format("%s words hyphenated, %s unique",nofwords,nofhashed)
+ end
+ end)
+
+ setmetatableindex(dictionaries,function(t,k) -- we use an independent data structure
+ local specification = languages.getdata(k)
+ local dictionary = {
+ patterns = { },
+ hyphenated = { },
+ specials = { },
+ instance = 0,
+ characters = { },
+ unicodes = { },
+ }
+ if specification then
+ local resources = specification.resources
+ if resources then
+ local patterns = resources.patterns
+ if patterns then
+ local data = patterns.data
+ if data then
+ -- regular patterns
+ lpegmatch(word,data,1,dictionary.patterns,dictionary.specials)
+ end
+ local extra = patterns.extra
+ if extra then
+ -- special patterns
+ lpegmatch(word,extra,1,dictionary.patterns,dictionary.specials)
+ end
+ end
+ local usedchars = lpegmatch(split,patterns.characters)
+ local characters = { }
+ local unicodes = { }
+ for i=1,#usedchars do
+ local char = usedchars[i]
+ local code = utfbyte(char)
+ local upper = uccodes[code]
+ characters[char] = code
+ unicodes [code] = char
+ unicodes [upper] = utfchar(upper)
+ end
+ dictionary.characters = characters
+ dictionary.unicodes = unicodes
+ setmetatableindex(characters,function(t,k) local v = utfbyte(k) t[k] = v return v end) -- can be non standard
+ -- setmetatableindex(unicodes, function(t,k) local v = utfchar(k) t[k] = v return v end)
+ end
+ t[specification.number] = dictionary
+ dictionary.instance = specification.instance -- needed for hyphenchars
+ end
+ t[k] = dictionary
+ return dictionary
+ end)
+
+ local function flush(head,start,stop,dictionary,w,h,lefthyphenchar,righthyphenchar,characters,lefthyphenmin,righthyphenmin)
+ local r = { }
+ local l = #h
+ local n = 0
+ local i = 1
+ local left = lefthyphenmin
+ local right = l - righthyphenmin + 1
+ while i <= l do
+ if i > left and i < right then
+ local hi = h[i]
+ if not hi then
+ n = n + 1
+ r[n] = w[i]
+ i = i + 1
+ elseif hi == true then
+ n = n + 1
+ r[n] = true
+ n = n + 1
+ r[n] = w[i]
+ i = i + 1
+ else
+ local b = i - hi.start -- + 1 - 1
+ local e = b + hi.length - 1
+ n = b
+ r[n] = { hi.before, hi.after, concat(w,"",b,e) }
+ i = e + 1
+ end
+ else
+ n = n + 1
+ r[n] = w[i]
+ i = i + 1
+ end
+ end
+
+ local function serialize(s,lefthyphenchar,righthyphenchar)
+ if not s then
+ return
+ elseif s == true then
+ local n = copy_node(stop)
+ setfield(n,"char",lefthyphenchar or righthyphenchar)
+ return n
+ end
+ local h = nil
+ local c = nil
+ if lefthyphenchar then
+ h = copy_node(stop)
+ setfield(h,"char",lefthyphenchar)
+ c = h
+ end
+ if #s == 1 then
+ local n = copy_node(stop)
+ setfield(n,"char",characters[s])
+ if not h then
+ h = n
+ else
+ insert_after(c,c,n)
+ end
+ c = n
+ else
+ local t = lpegmatch(split,s)
+ for i=1,#t do
+ local n = copy_node(stop)
+ setfield(n,"char",characters[t[i]])
+ if not h then
+ h = n
+ else
+ insert_after(c,c,n)
+ end
+ c = n
+ end
+ end
+ if righthyphenchar then
+ local n = copy_node(stop)
+ insert_after(c,c,n)
+ setfield(n,"char",righthyphenchar)
+ end
+ return h
+ end
+
+ -- no grow
+
+ local current = start
+ local size = #r
+ for i=1,size do
+ local ri = r[i]
+ if ri == true then
+ local n = new_disc()
+ if righthyphenchar then
+ setfield(n,"pre",serialize(true,righthyphenchar))
+ end
+ if lefthyphenchar then
+ setfield(n,"post",serialize(true,lefthyphenchar))
+ end
+ insert_before(head,current,n)
+ elseif type(ri) == "table" then
+ local n = new_disc()
+ local pre, post, replace = ri[1], ri[2], ri[3]
+ if pre then
+ setfield(n,"pre",serialize(pre,false,righthyphenchar))
+ end
+ if post then
+ setfield(n,"post",serialize(post,lefthyphenchar,false))
+ end
+ if replace then
+ setfield(n,"replace",serialize(replace))
+ end
+ insert_before(head,current,n)
+ else
+ setfield(current,"char",characters[ri])
+ if i < size then
+ current = getnext(current)
+ end
+ end
+ end
+ if current ~= stop then
+ local current = getnext(current)
+ local last = getnext(stop)
+ while current ~= last do
+ head, current = remove_node(head,current,true)
+ end
+ end
+ end
+
+ -- simple cases: no special .. only inject
+
+ local prehyphenchar = lang.prehyphenchar
+ local posthyphenchar = lang.posthyphenchar
+
+ local lccodes = characters.lccodes
+
+ -- An experimental feature:
+ --
+ -- \setupalign[verytolerant,flushleft]
+ -- \setuplayout[width=140pt] \showframe
+ -- longword longword long word longword longwordword \par
+ -- \enabledirectives[hyphenators.rightwordsmin=1]
+ -- longword longword long word longword longwordword \par
+ -- \disabledirectives[hyphenators.rightwordsmin]
+ --
+ -- An alternative is of course to pack the words in an hbox.
+
+ local rightwordsmin = 0 -- todo: parproperties (each par has a number anyway)
+
+ function traditional.hyphenate(head)
+ local first = tonut(head)
+ local current = first
+ local dictionary = nil
+ local instance = nil
+ local characters = nil
+ local unicodes = nil
+ local language = nil
+ local start = nil
+ local stop = nil
+ local word = nil -- maybe reuse and pass size
+ local size = 0
+ local leftchar = false
+ local rightchar = false -- utfbyte("-")
+ local leftmin = 0
+ local rightmin = 0
+ local lastone = nil
+
+ if rightwordsmin > 0 then
+ lastone = node_tail(first)
+ local inword = false
+ while lastone and rightwordsmin > 0 do
+ local id = getid(lastone)
+ if id == glyph_code then
+ inword = true
+ elseif inword then
+ inword = false
+ rightwordsmin = rightwordsmin - 1
+ end
+ lastone = getprev(lastone)
+ end
+ end
+
+ while current ~= lastone do
+ local id = getid(current)
+ if id == glyph_code then
+ -- currently no lc/uc code support
+ local code = getchar(current)
+ local lang = getfield(current,"lang")
+ if lang ~= language then
+ if dictionary then
+ if leftmin + rightmin < #word then
+ local done = hyphenate(dictionary,word)
+ if done then
+ flush(first,start,stop,dictionary,word,done,leftchar,rightchar,characters,leftmin,rightmin)
+ end
+ end
+ end
+ language = lang
+ dictionary = dictionaries[language]
+ instance = dictionary.instance
+ characters = dictionary.characters
+ unicodes = dictionary.unicodes
+ leftchar = instance and posthyphenchar(instance)
+ rightchar = instance and prehyphenchar (instance)
+ leftmin = getfield(current,"left")
+ rightmin = getfield(current,"right")
+ if not leftchar or leftchar < 0 then
+ leftchar = false
+ end
+ if not rightchar or rightchar < 0 then
+ rightchar = false
+ end
+ local char = unicodes[code]
+ if char then
+ word = { char }
+ size = 1
+ start = current
+ end
+ elseif word then
+ local char = unicodes[code]
+ if char then
+ size = size + 1
+ word[size] = char
+ elseif dictionary then
+ if leftmin + rightmin < #word then
+ local done = hyphenate(dictionary,word)
+ if done then
+ flush(first,start,stop,dictionary,word,done,leftchar,rightchar,characters,leftmin,rightmin)
+ end
+ end
+ word = nil
+ end
+ else
+ local char = unicodes[code]
+ if char then
+ word = { char }
+ size = 1
+ start = current
+ -- leftmin = getfield(current,"left") -- can be an option
+ -- rightmin = getfield(current,"right") -- can be an option
+ end
+ end
+ stop = current
+ current = getnext(current)
+ elseif word then
+ if dictionary then
+ if leftmin + rightmin < #word then
+ local done = hyphenate(dictionary,word)
+ current = getnext(current)
+ if done then
+ flush(first,start,stop,dictionary,word,done,leftchar,rightchar,characters,leftmin,rightmin)
+ end
+ else
+ current = getnext(current) -- hm
+ end
+ else
+ current = getnext(current)
+ end
+ word = nil
+ elseif id == math_code then
+ current = getnext(end_of_math(current))
+ else
+ current = getnext(current)
+ end
+ end
+ return head, true
+ end
+
+ local texmethod = "builders.kernel.hyphenation"
+ local oldmethod = texmethod
+ local newmethod = texmethod
+
+ -- local newmethod = "languages.hyphenators.traditional.hyphenate"
+ --
+ -- nodes.tasks.prependaction("processors","words",newmethod)
+ -- nodes.tasks.disableaction("processors",oldmethod)
+ --
+ -- nodes.tasks.replaceaction("processors","words",oldmethod,newmethod)
+
+ -- \enabledirectives[hyphenators.method=traditional]
+ -- \enabledirectives[hyphenators.method=builtin]
+
+ directives.register("hyphenators.method",function(v)
+ if type(v) == "string" then
+ local valid = languages.hyphenators[v]
+ if valid and valid.hyphenate then
+ newmethod = "languages.hyphenators." .. v .. ".hyphenate"
+ else
+ newmethod = texmethod
+ end
+ else
+ newmethod = texmethod
+ end
+ if oldmethod ~= newmethod then
+ nodes.tasks.replaceaction("processors","words",oldmethod,newmethod)
+ end
+ oldmethod = newmethod
+ end)
+
+ -- experimental feature
+
+ directives.register("hyphenators.rightwordsmin",function(v)
+ rightwordsmin = tonumber(v) or 0
+ end)
+
+else
+
+ -- traditional.loadpatterns("nl","lang-nl")
+ -- traditional.loadpatterns("de","lang-de")
+
+ traditional.registerpattern("nl","e1ë", { start = 1, length = 2, before = "e", after = "e" } )
+ traditional.registerpattern("nl","oo1ë", { start = 2, length = 3, before = "o", after = "e" } )
+ traditional.registerpattern("de","qqxc9xkqq",{ start = 3, length = 4, before = "ab", after = "cd" } )
+
+ local specification = {
+ lefthyphenmin = 2,
+ righthyphenmin = 2,
+ lefthyphenchar = "<",
+ righthyphenchar = ">",
+ }
+
+ print("reëel", traditional.injecthyphens(dictionaries.nl,"reëel", specification),"r{e>}{<e}{eë}el")
+ print("reeëel", traditional.injecthyphens(dictionaries.nl,"reeëel", specification),"re{e>}{<e}{eë}el")
+ print("rooëel", traditional.injecthyphens(dictionaries.nl,"rooëel", specification),"r{o>}{<e}{ooë}el")
+
+ print( "qxcxkq", traditional.injecthyphens(dictionaries.de, "qxcxkq", specification),"")
+ print( "qqxcxkqq", traditional.injecthyphens(dictionaries.de, "qqxcxkqq", specification),"")
+ print( "qqqxcxkqqq", traditional.injecthyphens(dictionaries.de, "qqqxcxkqqq", specification),"")
+ print("qqqqxcxkqqqq",traditional.injecthyphens(dictionaries.de,"qqqqxcxkqqqq",specification),"")
+
+end
+
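Side note (an illustration, not part of the patch): the hyphenate() function above follows the classic Liang convention, in which each candidate position keeps the maximum value contributed by all matching patterns and only odd maxima become break points, which is what the done[i] % 2 == 1 test checks. A minimal standalone Lua sketch of that rule, with a hypothetical pattern table and without the specials and lowercase handling of the real code:

-- illustration only: simplified Liang-style marking; patterns are keyed by the
-- bare letter sequence and carry one (hypothetical) value per letter position
local patterns = { hyph = { 0, 0, 3, 0 }, yphe = { 0, 0, 0, 2 } }

local function markpoints(word)
    local l    = #word
    local done = { }
    for i=1,l do done[i] = 0 end
    for i=1,l do
        for j=i,l do
            local m = patterns[word:sub(i,j)]
            if m then
                for k=1,#m do
                    local pos = i + k - 1
                    if m[k] > done[pos] then done[pos] = m[k] end
                end
            end
        end
    end
    for i=1,l do done[i] = done[i] % 2 == 1 end -- odd maxima become break points
    return done -- done[i] == true permits a break before position i
end

-- markpoints("hyphenation") marks position 3, i.e. hy-phenation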
diff --git a/tex/context/base/lang-hyp.mkiv b/tex/context/base/lang-hyp.mkiv
new file mode 100644
index 000000000..0cd5a72ca
--- /dev/null
+++ b/tex/context/base/lang-hyp.mkiv
@@ -0,0 +1,109 @@
+%D \module
+%D [ file=lang-hyp,
+%D version=2014.08.10,
+%D title=\CONTEXT\ Language Macros,
+%D subtitle=Experimental Patterns,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D This is an experimental module. We often have to deal with titles
+%D that have conflicting demands:
+%D
+%D \startitemize
+%D \startitem They go into a dedicated space (often a graphic). \stopitem
+%D \startitem The words cannot be hyphenated. \stopitem
+%D \startitem But as an escape they can get hyphenated. \stopitem
+%D \startitem In that case we want at least an extra word on the last line. \stopitem
+%D \stopitemize
+%D
+%D These and maybe more cases can be dealt with using dedicated hyphenation
+%D mechanisms. At the same time we want to experiment with more extensive patterns
+%D as discussed in {\em TUGboat, Volume 27 (2006), No. 2—Proceedings of EuroTEX2006}.
+
+% lua: 5.341 5.354
+% tex: 5.174 5.262
+
+\writestatus{loading}{ConTeXt Language Macros / Experimental Patterns}
+
+\registerctxluafile{lang-hyp}{1.001}
+
+%D This command can change! At some point we will keep the setting with the
+%D paragraph and then the \type {\par} can go.
+
+\unexpanded\def\atleastoneword#1%
+ {\begingroup
+ \enabledirectives[hyphenators.method=traditional]%
+ \enabledirectives[hyphenators.rightwordsmin=1]%
+ \lefthyphenmin \plusfour
+ \righthyphenmin\plusfour
+ #1\par
+ \disabledirectives[hyphenators.rightwordsmin]%
+ \enabledirectives[hyphenators.method]%
+ \endgroup}
+
+\endinput
+
+% \starttext
+%
+% \enabledirectives[hyphenators.method=traditional]
+%
+% % \dorecurse{1000}{\input tufte \par}
+%
+% \setupalign[verytolerant,flushleft]
+% \setuplayout[width=140pt] \showframe
+%
+% longword longword long word longword longwordword \blank
+%
+% \enabledirectives[hyphenators.rightwordsmin=1]
+%
+% longword longword long word longword longwordword\blank
+%
+% \disabledirectives[hyphenators.rightwordsmin]
+%
+% longword longword long word longword longwordword\blank
+%
+% \atleastoneword{longword longword long word longword longwordword}
+%
+% \enabledirectives[hyphenators.method=traditional]
+%
+% \stoptext
+
+% \startluacode
+% -- e1ë/e=e reëel re-eel
+% -- a1atje./a=t,1,3 omaatje oma-tje
+% -- schif1f/ff=f,5,2 Schiffahrt Schiff-fahrt
+%
+% languages.hyphenators.traditional.registerpattern("en","a1b", { start = 1, length = 2, before = "CD", after = "EF" } )
+% languages.hyphenators.traditional.registerpattern("en","e1ë", { start = 1, length = 2, before = "e", after = "e" } )
+% languages.hyphenators.traditional.registerpattern("en","oo1ë", { start = 2, length = 2, before = "o", after = "e" } )
+% languages.hyphenators.traditional.registerpattern("en","qqxc9xkqq",{ start = 3, length = 4, before = "ab", after = "cd" } ) -- replacement start length
+%
+% -- print("reëel", injecthyphens(dictionaries.nl,"reëel", 2,2))
+% -- print("reeëel", injecthyphens(dictionaries.nl,"reeëel", 2,2))
+% -- print("rooëel", injecthyphens(dictionaries.nl,"rooëel", 2,2))
+% -- print( "QXcXkQ", injecthyphens(dictionaries.de, "QXcXkQ", 2,2))
+% -- print( "QQXcXkQQ", injecthyphens(dictionaries.de, "QQXcXkQQ", 2,2))
+% -- print( "QQQXcXkQQQ", injecthyphens(dictionaries.de, "QQQXcXkQQQ", 2,2))
+% -- print("QQQQXcXkQQQQ",injecthyphens(dictionaries.de,"QQQQXcXkQQQQ",2,2))
+% --
+% -- print( "QQXcXkQQ QQXcXkQQ", injecthyphens(dictionaries.de, "QQXcXkQQ QQXcXkQQ", 2,2))
+% \stopluacode
+%
+% \starttext
+%
+% \blank
+%
+% xreëel rooëel \par xxabxx xxxabxxx \par
+%
+% \hsize1mm \lefthyphenmin2 \righthyphenmin2
+%
+% \blank Capacity \blank capacity \blank xyabxy \blank xreëel \blank rooëel \blank
+%
+% xy\discretionary{CD}{EF}{ab}xy % xxacceedxxx
+%
+% \stoptext
diff --git a/tex/context/base/lxml-ini.mkiv b/tex/context/base/lxml-ini.mkiv
index 759fda78a..017152777 100644
--- a/tex/context/base/lxml-ini.mkiv
+++ b/tex/context/base/lxml-ini.mkiv
@@ -21,7 +21,6 @@
%registerctxluafile{lxml-xml}{1.001} % xml finalizers
%registerctxluafile{lxml-aux}{1.001} % extras using parser
%registerctxluafile{lxml-mis}{1.001} % extras independent of parser
-\registerctxluafile{char-ent}{1.001}
\registerctxluafile{lxml-ent}{1.001} % entity hacks
\registerctxluafile{lxml-tex}{1.001} % tex finalizers
\registerctxluafile{lxml-dir}{1.001} % ctx hacks
diff --git a/tex/context/base/math-fbk.lua b/tex/context/base/math-fbk.lua
index 70a8ae8d6..63a0e9f88 100644
--- a/tex/context/base/math-fbk.lua
+++ b/tex/context/base/math-fbk.lua
@@ -330,7 +330,7 @@ end
-- we could move the defs from math-act here
-local function accent_to_extensible(target,newchr,original,oldchr,height,depth,swap,offset)
+local function accent_to_extensible(target,newchr,original,oldchr,height,depth,swap,offset,unicode)
local characters = target.characters
local olddata = characters[oldchr]
-- brrr ... pagella has only next
@@ -346,10 +346,11 @@ local function accent_to_extensible(target,newchr,original,oldchr,height,depth,s
end
local correction = swap and { "down", (olddata.height or 0) - height } or { "down", olddata.height + (offset or 0)}
local newdata = {
- commands = { correction, { "slot", 1, oldchr } },
- width = olddata.width,
- height = height,
- depth = depth,
+ commands = { correction, { "slot", 1, oldchr } },
+ width = olddata.width,
+ height = height,
+ depth = depth,
+ tounicode = tounicode16(unicode),
}
local glyphdata = newdata
local nextglyph = olddata.next
@@ -400,6 +401,9 @@ local function accent_to_extensible(target,newchr,original,oldchr,height,depth,s
end
return glyphdata, true
else
+-- if not olddata.tounicode then
+-- olddata.tounicode = tounicode16(unicode),
+-- end
return olddata, false
end
end
@@ -415,7 +419,7 @@ virtualcharacters[0x203E] = function(data) -- could be FE33E instead
height = target.parameters.xheight/4
depth = height
end
- return accent_to_extensible(target,0x203E,data.original,0x0305,height,depth)
+ return accent_to_extensible(target,0x203E,data.original,0x0305,height,depth,nil,nil,0x203E)
end
virtualcharacters[0xFE33E] = virtualcharacters[0x203E] -- convenient
@@ -426,37 +430,37 @@ local function smashed(data,unicode,swap,private)
local original = data.original
local chardata = target.characters[unicode]
if chardata and chardata.height > target.parameters.xheight then
- return accent_to_extensible(target,private,original,unicode,0,0,swap)
+ return accent_to_extensible(target,private,original,unicode,0,0,swap,nil,unicode)
else
return original.characters[unicode]
end
end
-addextra(0xFE3DE, { description="EXTENSIBLE OF 0x03DE", unicodeslot=0xFE3DE, mathextensible = "r", mathstretch = "h" } )
-addextra(0xFE3DC, { description="EXTENSIBLE OF 0x03DC", unicodeslot=0xFE3DC, mathextensible = "r", mathstretch = "h" } )
-addextra(0xFE3B4, { description="EXTENSIBLE OF 0x03B4", unicodeslot=0xFE3B4, mathextensible = "r", mathstretch = "h" } )
+addextra(0xFE3DE, { description="EXTENSIBLE OF 0x03DE", unicodeslot=0xFE3DE, mathextensible = "r", mathstretch = "h", mathclass = "topaccent" } )
+addextra(0xFE3DC, { description="EXTENSIBLE OF 0x03DC", unicodeslot=0xFE3DC, mathextensible = "r", mathstretch = "h", mathclass = "topaccent" } )
+addextra(0xFE3B4, { description="EXTENSIBLE OF 0x03B4", unicodeslot=0xFE3B4, mathextensible = "r", mathstretch = "h", mathclass = "topaccent" } )
virtualcharacters[0xFE3DE] = function(data) return smashed(data,0x23DE,0x23DF,0xFE3DE) end
virtualcharacters[0xFE3DC] = function(data) return smashed(data,0x23DC,0x23DD,0xFE3DC) end
virtualcharacters[0xFE3B4] = function(data) return smashed(data,0x23B4,0x23B5,0xFE3B4) end
-addextra(0xFE3DF, { description="EXTENSIBLE OF 0x03DF", unicodeslot=0xFE3DF, mathextensible = "r", mathstretch = "h" } )
-addextra(0xFE3DD, { description="EXTENSIBLE OF 0x03DD", unicodeslot=0xFE3DD, mathextensible = "r", mathstretch = "h" } )
-addextra(0xFE3B5, { description="EXTENSIBLE OF 0x03B5", unicodeslot=0xFE3B5, mathextensible = "r", mathstretch = "h" } )
+addextra(0xFE3DF, { description="EXTENSIBLE OF 0x03DF", unicodeslot=0xFE3DF, mathextensible = "r", mathstretch = "h", mathclass = "botaccent" } )
+addextra(0xFE3DD, { description="EXTENSIBLE OF 0x03DD", unicodeslot=0xFE3DD, mathextensible = "r", mathstretch = "h", mathclass = "botaccent" } )
+addextra(0xFE3B5, { description="EXTENSIBLE OF 0x03B5", unicodeslot=0xFE3B5, mathextensible = "r", mathstretch = "h", mathclass = "botaccent" } )
-virtualcharacters[0xFE3DF] = function(data) return data.target.characters[0x23DF] end
-virtualcharacters[0xFE3DD] = function(data) return data.target.characters[0x23DD] end
-virtualcharacters[0xFE3B5] = function(data) return data.target.characters[0x23B5] end
+virtualcharacters[0xFE3DF] = function(data) local c = data.target.characters[0x23DF] if c then c.tounicode = tounicode16(0x23DF) return c end end
+virtualcharacters[0xFE3DD] = function(data) local c = data.target.characters[0x23DD] if c then c.tounicode = tounicode16(0x23DD) return c end end
+virtualcharacters[0xFE3B5] = function(data) local c = data.target.characters[0x23B5] if c then c.tounicode = tounicode16(0x23B5) return c end end
-- todo: add some more .. numbers might change
-addextra(0xFE302, { description="EXTENSIBLE OF 0x0302", unicodeslot=0xFE302, mathstretch = "h" } )
-addextra(0xFE303, { description="EXTENSIBLE OF 0x0303", unicodeslot=0xFE303, mathstretch = "h" } )
+addextra(0xFE302, { description="EXTENSIBLE OF 0x0302", unicodeslot=0xFE302, mathstretch = "h", mathclass = "topaccent" } )
+addextra(0xFE303, { description="EXTENSIBLE OF 0x0303", unicodeslot=0xFE303, mathstretch = "h", mathclass = "topaccent" } )
local function smashed(data,unicode,private)
local target = data.target
local height = target.parameters.xheight / 2
- local c, done = accent_to_extensible(target,private,data.original,unicode,height,0,nil,-height)
+ local c, done = accent_to_extensible(target,private,data.original,unicode,height,0,nil,-height,unicode)
if done then
c.top_accent = nil -- or maybe also all the others
end
diff --git a/tex/context/base/math-ini.lua b/tex/context/base/math-ini.lua
index 9772ce538..4cfa02e4e 100644
--- a/tex/context/base/math-ini.lua
+++ b/tex/context/base/math-ini.lua
@@ -24,6 +24,7 @@ local commands = commands
local context_sprint = context.sprint
----- context_fprint = context.fprint -- a bit inefficient
+local ctx_doifelse = commands.doifelse
local trace_defining = false trackers.register("math.defining", function(v) trace_defining = v end)
@@ -464,7 +465,7 @@ end
local function utfmathfiller(chr, default)
local cd = somechar[chr]
- local cmd = cd and (cd.mathfiller or cd.mathname)
+ local cmd = cd and cd.mathfiller -- or cd.mathname
return cmd or default or ""
end
@@ -481,15 +482,46 @@ function commands.utfmathstretch(...) context(utfmathstretch(...)) end
function commands.utfmathcommand(...) context(utfmathcommand(...)) end
function commands.utfmathfiller (...) context(utfmathfiller (...)) end
-function commands.doifelseutfmathaccent(chr,asked)
- commands.doifelse(utfmathaccent(chr,nil,asked))
+function commands.utfmathcommandabove(asked)
+ local c = utfmathcommand(asked,nil,"topaccent","over" )
+ if c ~= "" then
+ context(c)
+ end
+end
+
+function commands.utfmathcommandbelow (asked)
+ local c = utfmathcommand(asked,nil,"botaccent","under")
+ if c ~= "" then
+ context(c)
+ end
end
-function commands.utfmathcommandabove(asked) context(utfmathcommand(asked,nil,"topaccent","over" )) end
-function commands.utfmathcommandbelow(asked) context(utfmathcommand(asked,nil,"botaccent","under")) end
+function commands.utfmathcommandfiller(asked)
+ local c = utfmathfiller(asked,nil)
+ if c ~= "" then
+ context(c)
+ end
+end
-function commands.doifelseutfmathabove(chr) commands.doifelse(utfmathaccent(chr,nil,"topaccent","over" )) end
-function commands.doifelseutfmathbelow(chr) commands.doifelse(utfmathaccent(chr,nil,"botaccent","under")) end
+function commands.doifelseutfmathabove(chr)
+ local c = utfmathaccent(chr,nil,"topaccent","over")
+ ctx_doifelse(c and c ~= "")
+end
+
+function commands.doifelseutfmathbelow(chr)
+ local c = utfmathaccent(chr,nil,"botaccent","under")
+ ctx_doifelse(c and c ~= "")
+end
+
+function commands.doifelseutfmathaccent(chr,asked)
+ local c = utfmathaccent(chr,nil,asked)
+ ctx_doifelse(c and c ~= "")
+end
+
+function commands.doifelseutfmathfiller(chr)
+ local c = utfmathfiller(chr,nil)
+ ctx_doifelse(c and c ~= "")
+end
-- helpers
--
diff --git a/tex/context/base/math-ini.mkiv b/tex/context/base/math-ini.mkiv
index a2f481df2..a7b2a924c 100644
--- a/tex/context/base/math-ini.mkiv
+++ b/tex/context/base/math-ini.mkiv
@@ -301,8 +301,9 @@
\def\utfmathclassfiltered #1#2{\ctxcommand{utfmathclass (\!!bs#1\!!es,nil,"#2")}}
\def\utfmathcommandfiltered#1#2{\ctxcommand{utfmathcommand(\!!bs#1\!!es,nil,"#2")}}
-\def\utfmathcommandabove#1{\ctxcommand{utfmathcommandabove(\!!bs#1\!!es)}}
-\def\utfmathcommandbelow#1{\ctxcommand{utfmathcommandbelow(\!!bs#1\!!es)}}
+\def\utfmathcommandabove #1{\ctxcommand{utfmathcommandabove (\!!bs#1\!!es)}}
+\def\utfmathcommandbelow #1{\ctxcommand{utfmathcommandbelow (\!!bs#1\!!es)}}
+\def\utfmathcommandfiller#1{\ctxcommand{utfmathcommandfiller(\!!bs#1\!!es)}}
\unexpanded\def\doifelseutfmathaccent #1{\ctxcommand{doifelseutfmathaccent(\!!bs#1\!!es)}}
\unexpanded\def\doifelseutfmathaccentfiltered#1#2{\ctxcommand{doifelseutfmathaccent(\!!bs#1\!!es,"#2")}}
@@ -310,6 +311,8 @@
\unexpanded\def\doifelseutfmathabove #1{\ctxcommand{doifelseutfmathabove(\!!bs#1\!!es)}}
\unexpanded\def\doifelseutfmathbelow #1{\ctxcommand{doifelseutfmathbelow(\!!bs#1\!!es)}}
+\unexpanded\def\doifelseutfmathfiller #1{\ctxcommand{doifelseutfmathfiller(\!!bs#1\!!es)}}
+
%D Not used that much:
\installcorenamespace{mathcodecommand}
diff --git a/tex/context/base/math-int.mkiv b/tex/context/base/math-int.mkiv
index 6b480961b..6b65738ff 100644
--- a/tex/context/base/math-int.mkiv
+++ b/tex/context/base/math-int.mkiv
@@ -13,6 +13,8 @@
\writestatus{loading}{ConTeXt Math Macros / Integrals}
+% todo: int and sum etc can be stackers
+
\unprotect
%D \startbuffer
diff --git a/tex/context/base/math-stc.mkvi b/tex/context/base/math-stc.mkvi
index 140d0244b..ca39287c5 100644
--- a/tex/context/base/math-stc.mkvi
+++ b/tex/context/base/math-stc.mkvi
@@ -16,6 +16,8 @@
\unprotect
+%D WARNING: If the code here changes, the export needs to be checked!
+
%D At some point the \MKII\ arrow mechanism has been converted to \MKIV, but we kept
%D most of the logic. We now have a more generic variant dealing with extensibles.
%D There are a few demands that we need to meet:
@@ -78,7 +80,7 @@
{\mathstylehbox{\usemathstackerscolorparameter\c!color
\Umathaccent\fam\zerocount\scratchunicode{\hskip\hsize}}}
-% these delimiters are a unuseable as theu don't center for small arguments:
+% these delimiters are unusable as they don't center for small arguments:
%
% $\Umathaccent 0 0 "2190{x}$ \par $\Umathaccent 0 0 "27F8{x}$\par
% $\Udelimiterunder 0 "2190{x}$ \par $\Udelimiterunder 0 "27F8{x}$\par
@@ -121,6 +123,18 @@
\def\math_stackers_skip_indeed#amount%
{\filledhboxk{\unsetteststrut\strut\hskip#amount}} % \dontshowstruts
+\let\math_stackers_start_tagged_mid\relax
+\let\math_stackers_start_tagged_top\relax
+\let\math_stackers_start_tagged_bot\relax
+\let\math_stackers_stop_tagged \relax
+
+\appendtoks
+ \def\math_stackers_start_tagged_mid{\dostarttagged\t!mathstackermid\empty\hbox\bgroup}%
+ \def\math_stackers_start_tagged_top{\dostarttagged\t!mathstackertop\empty\hbox\bgroup}%
+ \def\math_stackers_start_tagged_bot{\dostarttagged\t!mathstackerbot\empty\hbox\bgroup}%
+ \def\math_stackers_stop_tagged {\egroup\dostoptagged}%
+\to \everysetuptagging
+
%D We define a full featured command handler.
\installcorenamespace {mathstackers}
@@ -139,6 +153,7 @@
\c!mpoffset=.25\exheight,
\c!voffset=.25\exheight,
\c!hoffset=.5\emwidth,
+ \c!distance=\mathstackersparameter\c!voffset, % distance between symbol and base (can be different from voffset)
\c!minheight=\exheight,
\c!mindepth=\zeropoint,
\c!minwidth=\emwidth,
@@ -264,6 +279,7 @@
{\begingroup
\edef\currentmathstackers{#category}%
\mathstackersparameter\c!left\relax
+ \dostarttagged\t!mathstacker\currentmathstackers
\ifmmode\math_class_by_parameter\mathstackersparameter\else\dontleavehmode\fi
{\edef\p_offset {\mathstackersparameter\c!offset}%
\edef\p_location {\mathstackersparameter\c!location}%
@@ -311,7 +327,11 @@
\fi
\scratchwidth\wd
\ifdim\wd\scratchboxone>\wd\scratchboxtwo
- \scratchboxone
+ \ifdim\wd\scratchboxone>\wd\scratchboxthree
+ \scratchboxone
+ \else
+ \scratchboxthree
+ \fi
\else\ifdim\wd\scratchboxtwo>\wd\scratchboxthree
\scratchboxtwo
\else
@@ -327,7 +347,9 @@
\advance\scratchwidth2\scratchhoffset
%
\ifcase#method\relax
+ \dostarttagged\t!mathstackermid\empty
\setbox\scratchboxthree\csname\??mathstackersalternative\p_alternative\endcsname
+ \dostoptagged
\fi
%
\ifdim\wd\scratchboxone<\scratchwidth
@@ -371,10 +393,13 @@
%
\math_stackers_normalize_three
%
+ \math_stackers_start_tagged_mid
\math_stackers_middle\bgroup
\box\scratchboxthree
\egroup
+ \math_stackers_stop_tagged
%
+ \math_stackers_start_tagged_top
\ifdim\htdp\scratchboxone>\zeropoint
\scratchoffset\scratchvoffset
\kern-\scratchwidth
@@ -383,7 +408,9 @@
\box\scratchboxone
\egroup
\fi
+ \math_stackers_stop_tagged
%
+ \math_stackers_start_tagged_bot
\ifdim\htdp\scratchboxtwo>\zeropoint
\scratchoffset\scratchvoffset
\kern-\scratchwidth
@@ -391,7 +418,9 @@
\lower\dimexpr\ht\scratchboxtwo+\scratchdepth+\scratchoffset+\scratchbottomoffset\relax
\box\scratchboxtwo
\egroup
- \fi}%
+ \fi
+ \math_stackers_stop_tagged}%
+ \dostoptagged
\mathstackersparameter\c!right\relax
\endgroup}
@@ -453,6 +482,7 @@
{\begingroup
\edef\currentmathstackers{#category}%
\mathstackersparameter\c!left\relax
+ \dostarttagged\t!mathstacker\currentmathstackers
\ifmmode\math_class_by_parameter\mathstackersparameter\else\dontleavehmode\fi
{\edef\currentmathstackers{#category}%
\edef\m_math_stackers_text_middle {#text}%
@@ -480,37 +510,51 @@
\fi
\advance\scratchwidth2\scratchhoffset
%
- \setbox\scratchboxtwo \csname\??mathstackersalternative\p_alternative\endcsname
- \setbox\scratchboxthree\hbox to \scratchwidth{\hss\box\scratchboxthree\hss}%
+ \setbox\scratchboxtwo\csname\??mathstackersalternative\p_alternative\endcsname
+ \setbox\scratchboxthree\hbox
+ to \scratchwidth{\hss\box\scratchboxthree\hss}%
%
\math_stackers_normalize_three
%
+ \math_stackers_start_tagged_mid
\math_stackers_middle\bgroup
\box\scratchboxthree
\egroup
+ \math_stackers_stop_tagged
%
\ifdim\htdp\scratchboxtwo>\zeropoint
\kern-\scratchwidth
+ \math_stackers_start_tagged_top
\ifcase#top\else
\math_stackers_top\bgroup
- % \raise\dimexpr\scratchheight+\scratchtopoffset\relax
- \raise\dimexpr\scratchheight+\mathstackersparameter\c!voffset\relax
+ \raise\dimexpr
+ \scratchheight
+ +\dp\scratchboxtwo % new
+ +\mathstackersparameter\c!distance % was \c!voffset
+ \relax
\box\scratchboxtwo
\egroup
\fi
+ \math_stackers_stop_tagged
\scratchunicode#codeextra\relax
+ \math_stackers_start_tagged_bot
\ifcase\scratchunicode\else
\kern-\scratchwidth
\setbox\scratchboxtwo\csname\??mathstackersalternative\p_alternative\endcsname
\fi
\ifcase#bottom\else
\math_stackers_bottom\bgroup
- % \lower\dimexpr\scratchdepth+\ht\scratchboxtwo+\scratchbottomoffset\relax
- \lower\dimexpr\scratchdepth+\ht\scratchboxtwo+\mathstackersparameter\c!voffset\relax
+ \lower\dimexpr
+ \scratchdepth
+ +\ht\scratchboxtwo
+ +\mathstackersparameter\c!distance % was \c!voffset
+ \relax
\box\scratchboxtwo
\egroup
\fi
+ \math_stackers_stop_tagged
\fi}%
+ \dostoptagged
\mathstackersparameter\c!right\relax
\edef\p_limits{\mathstackersparameter\c!mathlimits}%
\ifx\p_limits\v!yes
@@ -551,14 +595,176 @@
\def\math_stackers_handle_over[#category]%
{\math_stackers_direct_double\plusone\zerocount{\iffirstargument#category\else\v!top \fi}} % will be defined later on
-\def\math_stackers_handle_under[#category]#codepoint#bottomtext%
+\def\math_stackers_handle_under[#category]%
{\math_stackers_direct_double\zerocount\plusone{\iffirstargument#category\else\v!bottom\fi}} % will be defined later on
-\def\math_stackers_handle_double[#category]#codepoint#bottomtext%
+\def\math_stackers_handle_double[#category]%
{\math_stackers_direct_double\plusone\plusone {\iffirstargument#category\else\v!bottom\fi}} % will be defined later on
\def\math_stackers_direct_double#top#bottom#category#codepoint#text%
- {\math_stackers_make_double#top#bottom{#category}{#codepoint}{#text}%
+ {\math_stackers_make_double#top#bottom{#category}{#codepoint}{0}{#text}%
+ \endgroup}
+
+%D A relatively new one is a combination of accents and text (as needed in mathml):
+
+\unexpanded\def\math_stackers_make_double_text#where#category#codepoint#text#extra%
+ {\begingroup
+ \edef\currentmathstackers{#category}%
+ \mathstackersparameter\c!left\relax
+ \dostarttagged\t!mathstacker\currentmathstackers
+ \ifmmode\math_class_by_parameter\mathstackersparameter\else\dontleavehmode\fi
+ {\edef\currentmathstackers{#category}%
+ %
+ \edef\p_offset {\mathstackersparameter\c!offset}%
+ \edef\p_location {\mathstackersparameter\c!location}%
+ \edef\p_strut {\mathstackersparameter\c!strut}%
+ \edef\p_alternative{\mathstackersparameter\c!alternative}%
+ %
+ \scratchleftoffset \zeropoint
+ \scratchrightoffset\zeropoint
+ %
+ \edef\m_math_stackers_text_middle{#text}%
+ \math_stackers_check_unicode{#codepoint}%
+ \scratchunicode#codepoint\relax
+ %
+ \ifx\math_stackers_middle\empty
+ \setbox\scratchboxthree\emptyhbox
+ \else
+ \setmathtextbox\scratchboxthree\hbox{\math_stackers_middletext}%
+ \fi
+ %
+ \ifcase#where\relax
+ \edef\m_math_stackers_text_top{#extra}%
+ \ifx\math_stackers_top\empty
+ \setbox\scratchboxone\emptyhbox
+ \else
+ \setmathsmalltextbox\scratchboxone\hbox{\math_stackers_toptext}%
+ \fi
+ \else
+ \edef\m_math_stackers_text_bottom{#extra}%
+ \ifx\math_stackers_bottom\empty
+ \setbox\scratchboxone\emptyhbox
+ \else
+ \setmathsmalltextbox\scratchboxone\hbox{\math_stackers_bottomtext}%
+ \fi
+ \fi
+ %
+ \scratchwidth\wd
+ \ifdim\wd\scratchboxone>\wd\scratchboxthree
+ \scratchboxone
+ \else
+ \scratchboxthree
+ \fi
+ \relax
+ \scratchdimen\mathstackersparameter\c!minwidth\relax
+ \ifdim\scratchwidth<\scratchdimen
+ \scratchwidth\scratchdimen
+ \fi
+ \advance\scratchwidth2\scratchhoffset
+ %
+ \ifdim\wd\scratchboxone<\scratchwidth
+ \setbox\scratchboxone\hbox to \scratchwidth{\hss\unhbox\scratchboxone\hss}%
+ \fi
+ \ifdim\wd\scratchboxthree<\scratchwidth
+ \setbox\scratchboxthree\hbox to \scratchwidth{\hss\unhbox\scratchboxthree\hss}%
+ \fi
+ %
+ \math_stackers_normalize_three
+ %
+ \math_stackers_start_tagged_mid
+ \math_stackers_middle\bgroup
+ \box\scratchboxthree
+ \egroup
+ \math_stackers_stop_tagged
+ %
+ \kern-\scratchwidth
+ \ifcase#where\relax
+ \setbox\scratchboxtwo\csname\??mathstackersalternative\p_alternative\endcsname
+ %
+ \math_stackers_start_tagged_top
+ \math_stackers_top\bgroup
+ \raise\dimexpr
+ \scratchheight
+ +\dp\scratchboxone
+ +\mathstackersparameter\c!voffset
+ \relax
+ \box\scratchboxone % toptext
+ \egroup
+ \math_stackers_stop_tagged
+ \kern-\scratchwidth
+ \math_stackers_start_tagged_bot
+ \math_stackers_bottom\bgroup
+ \lower\dimexpr
+ \scratchdepth
+ +\ht\scratchboxtwo
+ +\mathstackersparameter\c!distance
+ \relax
+ \box\scratchboxtwo % accent
+ \egroup
+ \math_stackers_stop_tagged
+ \else
+ \setbox\scratchboxtwo\csname\??mathstackersalternative\p_alternative\endcsname
+ %
+ \math_stackers_start_tagged_top
+ \math_stackers_top\bgroup
+ \raise\dimexpr
+ \scratchheight
+ +\dp\scratchboxtwo % new
+ +\mathstackersparameter\c!distance
+ \relax
+ \box\scratchboxtwo % accent
+ \egroup
+ \math_stackers_stop_tagged
+ \kern-\scratchwidth
+ \math_stackers_start_tagged_bot
+ \math_stackers_bottom\bgroup
+ \lower\dimexpr
+ \scratchdepth
+ +\ht\scratchboxone
+ +\mathstackersparameter\c!voffset
+ \relax
+ \box\scratchboxone % bottext
+ \egroup
+ \math_stackers_stop_tagged
+ \fi
+ }%
+ \dostoptagged
+ \mathstackersparameter\c!right\relax
+ \edef\p_limits{\mathstackersparameter\c!mathlimits}%
+ \ifx\p_limits\v!yes
+ \expandafter\endgroup\expandafter\limits
+ \else
+ \expandafter\endgroup
+ \fi}
+
+\unexpanded\def\definemathovertextextensible {\dotripleempty\math_extensibles_define_over_text }
+\unexpanded\def\definemathundertextextensible{\dotripleempty\math_extensibles_define_under_text}
+
+\def\math_extensibles_define_over_text[#1][#2][#3]%
+ {\ifthirdargument
+ \setuevalue{#2}{\math_stackers_make_double_text\plusone {#1}{\number#3}}%
+ \else
+ \setuevalue{#1}{\math_stackers_make_double_text\plusone \noexpand\currentmathstackers{\number#2}}%
+ \fi}
+
+\def\math_extensibles_define_under_text[#1][#2][#3]%
+ {\ifthirdargument
+ \setuevalue{#2}{\math_stackers_make_double_text\zerocount{#1}{\number#3}}%
+ \else
+ \setuevalue{#1}{\math_stackers_make_double_text\zerocount\noexpand\currentmathstackers{\number#2}}%
+ \fi}
+
+\unexpanded\def\mathovertext {\begingroup\dosingleempty\math_stackers_handle_over_text }
+\unexpanded\def\mathundertext{\begingroup\dosingleempty\math_stackers_handle_under_text }
+
+\def\math_stackers_handle_over_text[#category]%
+ {\math_stackers_direct_double_text\plusone {\iffirstargument#category\else\v!top \fi}} % will be defined later on
+
+\def\math_stackers_handle_under_text[#category]%
+ {\math_stackers_direct_double_text\zerocount{\iffirstargument#category\else\v!bottom\fi}} % will be defined later on
+
+\def\math_stackers_direct_double_text#where#category#codepoint#text#extra%
+ {\math_stackers_make_double_text#where{#category}{#codepoint}{#text}{#extra}%
\endgroup}
%D Here is a bonus macro that takes three texts. It can be used to get consistent
@@ -654,11 +860,23 @@
[\v!both]
\definemathstackers
- [vfenced]
+ [\v!vfenced]
[\v!both]
[\c!mathclass=\s!ord,
\c!mathlimits=\v!yes]
+% these are needed for mathml:
+
+% \setupmathstackers
+% [\v!both]
+% [\c!hoffset=1pt,
+% \c!voffset=1pt]
+
+\definemathstackers
+ [\v!bothtext]
+ [\v!both]
+ [\c!strut=\v!yes]
+
% These are compatibility definitions, math only.
% todo: top= bottom= middle= is nicer (compare math-fen)
@@ -761,6 +979,15 @@
\definemathextensible [\v!mathematics] [mrightleftharpoons] ["21CC]
\definemathextensible [\v!mathematics] [mtriplerel] ["2261]
+\definemathextensible [\v!mathematics] [eleftarrowfill] ["2190] % ["27F5]
+\definemathextensible [\v!mathematics] [erightarrowfill] ["2192] % ["27F6]
+\definemathextensible [\v!mathematics] [eleftrightarrowfill] ["27F7]
+\definemathextensible [\v!mathematics] [etwoheadrightarrowfill] ["27F9]
+\definemathextensible [\v!mathematics] [eleftharpoondownfill] ["21BD]
+\definemathextensible [\v!mathematics] [eleftharpoonupfill] ["21BC]
+\definemathextensible [\v!mathematics] [erightharpoondownfill] ["21C1]
+\definemathextensible [\v!mathematics] [erightharpoonupfill] ["21C0]
+
\definemathextensible [\v!text] [trel] ["002D]
\definemathextensible [\v!text] [tequal] ["003D]
\definemathextensible [\v!text] [tmapsto] ["21A6]
@@ -819,23 +1046,39 @@
% alternatively we can move the original to FE*
\definemathoverextensible [vfenced] [overbar] ["FE33E] % ["203E]
-\definemathunderextensible [vfenced] [underbar] ["FE33F] % ["203E]
+\definemathunderextensible [vfenced] [underbar] ["FE33F] % ["203E]
\definemathdoubleextensible [vfenced] [doublebar] ["FE33E] ["FE33F]
\definemathoverextensible [vfenced] [overbrace] ["FE3DE] % ["023DE]
-\definemathunderextensible [vfenced] [underbrace] ["FE3DF] % ["023DF]
+\definemathunderextensible [vfenced] [underbrace] ["FE3DF] % ["023DF]
\definemathdoubleextensible [vfenced] [doublebrace] ["FE3DE] ["FE3DF]
\definemathoverextensible [vfenced] [overparent] ["FE3DC] % ["023DC]
-\definemathunderextensible [vfenced] [underparent] ["FE3DD] % ["023DD]
+\definemathunderextensible [vfenced] [underparent] ["FE3DD] % ["023DD]
\definemathdoubleextensible [vfenced] [doubleparent] ["FE3DC] ["FE3DD]
\definemathoverextensible [vfenced] [overbracket] ["FE3B4] % ["023B4]
-\definemathunderextensible [vfenced] [underbracket] ["FE3B5] % ["023B5]
+\definemathunderextensible [vfenced] [underbracket] ["FE3B5] % ["023B5]
\definemathdoubleextensible [vfenced] [doublebracket] ["FE3B4] ["FE3B5]
% \unexpanded\def\mathopwithlimits#1#2{\mathop{#1{#2}}\limits}
+%D For mathml:
+
+\definemathdoubleextensible [both] [overbarunderbar] ["FE33E] ["FE33F]
+\definemathdoubleextensible [both] [overbraceunderbrace] ["FE3DE] ["FE3DF]
+\definemathdoubleextensible [both] [overparentunderparent] ["FE3DC] ["FE3DD]
+\definemathdoubleextensible [both] [overbracketunderbracket] ["FE3B4] ["FE3B5]
+
+\definemathovertextextensible [bothtext] [overbartext] ["FE33E]
+\definemathundertextextensible [bothtext] [underbartext] ["FE33F]
+\definemathovertextextensible [bothtext] [overbracetext] ["FE3DE]
+\definemathundertextextensible [bothtext] [underbracetext] ["FE3DF]
+\definemathovertextextensible [bothtext] [overparenttext] ["FE3DC]
+\definemathundertextextensible [bothtext] [underparenttext] ["FE3DD]
+\definemathovertextextensible [bothtext] [overbrackettext] ["FE3B4]
+\definemathundertextextensible [bothtext] [underbrackettext] ["FE3B5]
+
%D Some bonus ones (for the moment here):
\definemathstackers
diff --git a/tex/context/base/math-tag.lua b/tex/context/base/math-tag.lua
index 77c182942..638c4629c 100644
--- a/tex/context/base/math-tag.lua
+++ b/tex/context/base/math-tag.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['math-tag'] = {
license = "see context related readme files"
}
+-- todo: have a local list with local tags that then get appended
+
-- use lpeg matchers
local find, match = string.find, string.match
@@ -22,6 +24,7 @@ local getid = nuts.getid
local getchar = nuts.getchar
local getlist = nuts.getlist
local getfield = nuts.getfield
+local getsubtype = nuts.getsubtype
local getattr = nuts.getattr
local setattr = nuts.setattr
@@ -43,15 +46,30 @@ local math_style_code = nodecodes.style -- attr style
local math_choice_code = nodecodes.choice -- attr display text script scriptscript
local math_fence_code = nodecodes.fence -- attr subtype
+local accentcodes = nodes.accentcodes
+
+local math_fixed_top = accentcodes.fixedtop
+local math_fixed_bottom = accentcodes.fixedbottom
+local math_fixed_both = accentcodes.fixedboth
+
+local kerncodes = nodes.kerncodes
+
+local fontkern_code = kerncodes.fontkern
+
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
local glue_code = nodecodes.glue
+local kern_code = nodecodes.kern
+local math_code = nodecodes.math
local a_tagged = attributes.private('tagged')
+local a_taggedpar = attributes.private('taggedpar')
local a_exportstatus = attributes.private('exportstatus')
local a_mathcategory = attributes.private('mathcategory')
local a_mathmode = attributes.private('mathmode')
+local a_fontkern = attributes.private('fontkern')
local tags = structures.tags
@@ -67,6 +85,12 @@ local mathcodes = mathematics.codes
local ordinary_code = mathcodes.ordinary
local variable_code = mathcodes.variable
+local fromunicode16 = fonts.mappings.fromunicode16
+local font_of_family = node.family_font
+local fontcharacters = fonts.hashes.characters
+
+local report_tags = logs.reporter("structure","tags")
+
local process
local function processsubsup(start)
@@ -104,253 +128,363 @@ end
-- todo: variants -> original
local actionstack = { }
+local fencesstack = { }
+
+local P, S, C, Cc, lpegmatch = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc, lpeg.match
+local splittag = C(P(1-S(":-"))^1) * (P(":") * C((1-P("-"))^1) + Cc(""))
+
+-- glyph nodes and such can happen in under and over stuff
+
+local detail_accent = { detail = "accent" }
+
+local function getunicode(n) -- instead of getchar
+ local char = getchar(n)
+ local font = font_of_family(getfield(n,"fam")) -- font_of_family
+ local data = fontcharacters[font][char]
+ local unic = data.tounicode
+ return unic and fromunicode16(unic) or char
+end
process = function(start) -- we cannot use the processor as we have no finalizers (yet)
+ local mtexttag = nil
while start do
local id = getid(start)
- if id == math_char_code then
- local char = getchar(start)
- -- check for code
- local a = getattr(start,a_mathcategory)
- if a then
- a = { detail = a }
+ if id == glyph_code or id == disc_code then
+ if not mtexttag then
+ mtexttag = start_tagged("mtext")
end
- local code = getmathcode(char)
- if code then
- code = code[1]
+ setattr(start,a_tagged,mtexttag)
+ elseif mtexttag and id == kern_code and (getsubtype(start) == fontkern_code or getattr(start,a_fontkern)) then
+ setattr(start,a_tagged,mtexttag)
+ else
+ if mtexttag then
+ stop_tagged()
+ mtexttag = nil
end
- local tag
- if code == ordinary_code or code == variable_code then
- local ch = chardata[char]
- local mc = ch and ch.mathclass
- if mc == "number" then
- tag = "mn"
- elseif mc == "variable" or not mc then -- variable is default
- tag = "mi"
+ if id == math_char_code then
+ local char = getchar(start)
+ -- check for code
+ local a = getattr(start,a_mathcategory)
+ if a then
+ a = { detail = a }
+ end
+ local code = getmathcode(char)
+ if code then
+ code = code[1]
+ end
+ local tag
+ if code == ordinary_code or code == variable_code then
+ local ch = chardata[char]
+ local mc = ch and ch.mathclass
+ if mc == "number" then
+ tag = "mn"
+ elseif mc == "variable" or not mc then -- variable is default
+ tag = "mi"
+ else
+ tag = "mo"
+ end
else
tag = "mo"
end
- else
- tag = "mo"
- end
- setattr(start,a_tagged,start_tagged(tag,a))
- stop_tagged()
- break -- okay?
- elseif id == math_textchar_code then
- -- check for code
- local a = getattr(start,a_mathcategory)
- if a then
- setattr(start,a_tagged,start_tagged("ms",{ detail = a }))
- else
- setattr(start,a_tagged,start_tagged("ms"))
- end
- stop_tagged()
- break
- elseif id == math_delim_code then
- -- check for code
- setattr(start,a_tagged,start_tagged("mo"))
- stop_tagged()
- break
- elseif id == math_style_code then
- -- has a next
- elseif id == math_noad_code then
- processsubsup(start)
- elseif id == math_box_code or id == hlist_code or id == vlist_code then
- -- keep an eye on math_box_code and see what ends up in there
- local attr = getattr(start,a_tagged)
- local last = attr and taglist[attr]
- if last and find(last[#last],"formulacaption[:%-]") then
- -- leave alone, will nicely move to the outer level
- else
- local text = start_tagged("mtext")
- setattr(start,a_tagged,text)
- local list = getfield(start,"list")
- if not list then
- -- empty list
- elseif not attr then
- -- box comes from strange place
- set_attributes(list,a_tagged,text)
+ setattr(start,a_tagged,start_tagged(tag,a))
+ stop_tagged()
+ break -- okay?
+ elseif id == math_textchar_code then -- or id == glyph_code
+ -- check for code
+ local a = getattr(start,a_mathcategory)
+ if a then
+ setattr(start,a_tagged,start_tagged("ms",{ detail = a })) -- mtext
else
- -- Beware, the first node in list is the actual list so we definitely
- -- need to nest. This approach is a hack, maybe I'll make a proper
- -- nesting feature to deal with this at another level. Here we just
- -- fake structure by enforcing the inner one.
- local tagdata = taglist[attr]
- local common = #tagdata + 1
- local function runner(list) -- quite inefficient
- local cache = { } -- we can have nested unboxed mess so best local to runner
- for n in traverse_nodes(list) do
- local id = getid(n)
- local aa = getattr(n,a_tagged)
- if aa then
- local ac = cache[aa]
- if not ac then
- local tagdata = taglist[aa]
- local extra = #tagdata
- if common <= extra then
- for i=common,extra do
- ac = restart_tagged(tagdata[i]) -- can be made faster
- end
- for i=common,extra do
- stop_tagged() -- can be made faster
+ setattr(start,a_tagged,start_tagged("ms")) -- mtext
+ end
+ stop_tagged()
+ break
+ elseif id == math_delim_code then
+ -- check for code
+ setattr(start,a_tagged,start_tagged("mo"))
+ stop_tagged()
+ break
+ elseif id == math_style_code then
+ -- has a next
+ elseif id == math_noad_code then
+ processsubsup(start)
+ elseif id == math_box_code or id == hlist_code or id == vlist_code then
+ -- keep an eye on math_box_code and see what ends up in there
+ local attr = getattr(start,a_tagged)
+ local last = attr and taglist[attr]
+ local tag, detail
+ if last then
+ local fulltag = last[#last]
+ tag, detail = lpegmatch(splittag,fulltag)
+ end
+ if tag == "formulacaption" then
+ -- skip
+ elseif tag == "mstacker" then
+ local list = getfield(start,"list")
+ if list then
+ process(list)
+ end
+ else
+ if tag ~= "mstackertop" and tag ~= "mstackermid" and tag ~= "mstackerbot" then
+ tag = "mtext"
+ end
+ local text = start_tagged(tag)
+ setattr(start,a_tagged,text)
+ local list = getfield(start,"list")
+ if not list then
+ -- empty list
+ elseif not attr then
+ -- box comes from strange place
+ set_attributes(list,a_tagged,text) -- only the first node ?
+ else
+ -- Beware, the first node in list is the actual list so we definitely
+ -- need to nest. This approach is a hack, maybe I'll make a proper
+ -- nesting feature to deal with this at another level. Here we just
+ -- fake structure by enforcing the inner one.
+ --
+ -- todo: have a local list with local tags that then get appended
+ --
+ local tagdata = taglist[attr] or { }
+ local common = #tagdata + 1
+ local function runner(list,depth) -- quite inefficient
+ local cache = { } -- we can have nested unboxed mess so best local to runner
+ local keep = nil
+ -- local keep = { } -- in case we might need to move keep outside
+ for n in traverse_nodes(list) do
+ local id = getid(n)
+ local mth = id == math_code and getsubtype(n)
+ if mth == 0 then
+ -- insert(keep,text)
+ keep = text
+ text = start_tagged("mrow")
+ common = common + 1
+ end
+ local aa = getattr(n,a_tagged)
+ if aa then
+ local ac = cache[aa]
+ if not ac then
+ local tagdata = taglist[aa]
+ local extra = #tagdata
+ if common <= extra then
+ for i=common,extra do
+ ac = restart_tagged(tagdata[i]) -- can be made faster
+ end
+ for i=common,extra do
+ stop_tagged() -- can be made faster
+ end
+ else
+ ac = text
end
- else
- ac = text
+ cache[aa] = ac
end
- cache[aa] = ac
+ setattr(n,a_tagged,ac)
+ else
+ setattr(n,a_tagged,text)
+ end
+
+ if id == hlist_code or id == vlist_code then
+ runner(getlist(n),depth+1)
+ elseif id == glyph_code then
+ runner(getfield(n,"components"),depth+1) -- this should not be needed
+ elseif id == disc_code then
+ runner(getfield(n,"pre"),depth+1) -- idem
+ runner(getfield(n,"post"),depth+1) -- idem
+ runner(getfield(n,"replace"),depth+1) -- idem
+ end
+ if mth == 1 then
+ stop_tagged()
+ -- text = remove(keep)
+ text = keep
+ common = common - 1
end
- setattr(n,a_tagged,ac)
- else
- setattr(n,a_tagged,text)
- end
- if id == hlist_code or id == vlist_code then
- runner(getlist(n))
end
end
+ runner(list,0)
end
- runner(list)
+ stop_tagged()
end
- stop_tagged()
- end
- elseif id == math_sub_code then
- local list = getfield(start,"list")
- if list then
- local attr = getattr(start,a_tagged)
- local last = attr and taglist[attr]
- local action = last and match(last[#last],"maction:(.-)%-")
- if action and action ~= "" then
- if actionstack[#actionstack] == action then
+ elseif id == math_sub_code then -- normally a hbox
+ local list = getfield(start,"list")
+ if list then
+ local attr = getattr(start,a_tagged)
+ local last = attr and taglist[attr]
+ if last then
+ local fulltag = last[#last]
+ local tag, detail = lpegmatch(splittag,fulltag)
+ if tag == "maction" then
+ if detail == "" then
+ setattr(start,a_tagged,start_tagged("mrow"))
+ process(list)
+ stop_tagged()
+ elseif actionstack[#actionstack] == detail then
+ setattr(start,a_tagged,start_tagged("mrow"))
+ process(list)
+ stop_tagged()
+ else
+ insert(actionstack,detail)
+ setattr(start,a_tagged,start_tagged("mrow",{ detail = detail }))
+ process(list)
+ stop_tagged()
+ remove(actionstack)
+ end
+ elseif tag == "mstacker" then -- or tag == "mstackertop" or tag == "mstackermid" or tag == "mstackerbot" then
+ setattr(start,a_tagged,start_tagged(tag))
+ process(list)
+ stop_tagged()
+ else
+ setattr(start,a_tagged,start_tagged("mrow"))
+ process(list)
+ stop_tagged()
+ end
+ else -- never happens, we're always document
setattr(start,a_tagged,start_tagged("mrow"))
process(list)
stop_tagged()
- else
- insert(actionstack,action)
- setattr(start,a_tagged,start_tagged("mrow",{ detail = action }))
- process(list)
- stop_tagged()
- remove(actionstack)
end
- else
- setattr(start,a_tagged,start_tagged("mrow"))
- process(list)
- stop_tagged()
end
- end
- elseif id == math_fraction_code then
- local num = getfield(start,"num")
- local denom = getfield(start,"denom")
- local left = getfield(start,"left")
- local right = getfield(start,"right")
- if left then
- setattr(left,a_tagged,start_tagged("mo"))
- process(left)
- stop_tagged()
- end
- setattr(start,a_tagged,start_tagged("mfrac"))
- process(num)
- process(denom)
- stop_tagged()
- if right then
- setattr(right,a_tagged,start_tagged("mo"))
- process(right)
+ elseif id == math_fraction_code then
+ local num = getfield(start,"num")
+ local denom = getfield(start,"denom")
+ local left = getfield(start,"left")
+ local right = getfield(start,"right")
+ if left then
+ setattr(left,a_tagged,start_tagged("mo"))
+ process(left)
+ stop_tagged()
+ end
+ setattr(start,a_tagged,start_tagged("mfrac"))
+ process(num)
+ process(denom)
stop_tagged()
- end
- elseif id == math_choice_code then
- local display = getfield(start,"display")
- local text = getfield(start,"text")
- local script = getfield(start,"script")
- local scriptscript = getfield(start,"scriptscript")
- if display then
- process(display)
- end
- if text then
- process(text)
- end
- if script then
- process(script)
- end
- if scriptscript then
- process(scriptscript)
- end
- elseif id == math_fence_code then
- local delim = getfield(start,"delim")
- local subtype = getfield(start,"subtype")
- -- setattr(start,a_tagged,start_tagged("mfenced")) -- needs checking
- if subtype == 1 then
- -- left
- if delim then
- setattr(start,a_tagged,start_tagged("mleft"))
- process(delim)
+ if right then
+ setattr(right,a_tagged,start_tagged("mo"))
+ process(right)
stop_tagged()
end
- elseif subtype == 2 then
- -- middle
- if delim then
- setattr(start,a_tagged,start_tagged("mmiddle"))
- process(delim)
+ elseif id == math_choice_code then
+ local display = getfield(start,"display")
+ local text = getfield(start,"text")
+ local script = getfield(start,"script")
+ local scriptscript = getfield(start,"scriptscript")
+ if display then
+ process(display)
+ end
+ if text then
+ process(text)
+ end
+ if script then
+ process(script)
+ end
+ if scriptscript then
+ process(scriptscript)
+ end
+ elseif id == math_fence_code then
+ local delim = getfield(start,"delim")
+ local subtype = getfield(start,"subtype")
+ if subtype == 1 then
+ -- left
+ local properties = { }
+ insert(fencesstack,properties)
+ setattr(start,a_tagged,start_tagged("mfenced",nil,properties)) -- needs checking
+ if delim then
+ start_tagged("ignore")
+ properties.left = getfield(delim,"small_char")
+ process(delim)
+ stop_tagged()
+ end
+ elseif subtype == 2 then
+ -- middle
+ if delim then
+ start_tagged("ignore")
+ fencesstack[#fencesstack].middle = getfield(delim,"small_char")
+ process(delim)
+ stop_tagged()
+ end
+ elseif subtype == 3 then
+ local properties = remove(fencesstack)
+ if not properties then
+ report_tags("missing right fence")
+ properties = { }
+ end
+ if delim then
+ start_tagged("ignore")
+ properties.right = getfield(delim,"small_char")
+ process(delim)
+ stop_tagged()
+ end
stop_tagged()
+ else
+ -- can't happen
end
- elseif subtype == 3 then
- if delim then
- setattr(start,a_tagged,start_tagged("mright"))
- process(delim)
+ elseif id == math_radical_code then
+ local left = getfield(start,"left")
+ local degree = getfield(start,"degree")
+ if left then
+ start_tagged("ignore")
+ process(left) -- root symbol, ignored
stop_tagged()
end
- else
- -- can't happen
- end
- -- stop_tagged()
- elseif id == math_radical_code then
- local left = getfield(start,"left")
- local degree = getfield(start,"degree")
- if left then
- start_tagged("ignore")
- process(left) -- root symbol, ignored
- stop_tagged()
- end
- if degree then -- not good enough, can be empty mlist
- setattr(start,a_tagged,start_tagged("mroot"))
- processsubsup(start)
- process(degree)
- stop_tagged()
- else
- setattr(start,a_tagged,start_tagged("msqrt"))
- processsubsup(start)
- stop_tagged()
- end
- elseif id == math_accent_code then
- local accent = getfield(start,"accent")
- local bot_accent = getfield(start,"bot_accent")
- if bot_accent then
- if accent then
- setattr(start,a_tagged,start_tagged("munderover",{ detail = "accent" }))
+ if degree then -- not good enough, can be empty mlist
+ setattr(start,a_tagged,start_tagged("mroot"))
processsubsup(start)
- process(bot_accent)
- process(accent)
+ process(degree)
stop_tagged()
else
- setattr(start,a_tagged,start_tagged("munder",{ detail = "accent" }))
+ setattr(start,a_tagged,start_tagged("msqrt"))
processsubsup(start)
- process(bot_accent)
stop_tagged()
end
- elseif accent then
- setattr(start,a_tagged,start_tagged("mover",{ detail = "accent" }))
- processsubsup(start)
- process(accent)
+ elseif id == math_accent_code then
+ local accent = getfield(start,"accent")
+ local bot_accent = getfield(start,"bot_accent")
+ local subtype = getsubtype(start)
+ if bot_accent then
+ if accent then
+ setattr(start,a_tagged,start_tagged("munderover", detail_accent, {
+ top = getunicode(accent),
+ bottom = getunicode(bot_accent),
+ topfixed = subtype == math_fixed_top or subtype == math_fixed_both,
+ bottomfixed = subtype == math_fixed_bottom or subtype == math_fixed_both,
+ }))
+ processsubsup(start)
+ process(bot_accent)
+ process(accent)
+ stop_tagged()
+ else
+ setattr(start,a_tagged,start_tagged("munder", detail_accent, {
+ bottom = getunicode(bot_accent),
+ bottomfixed = subtype == math_fixed_bottom or subtype == math_fixed_both,
+ }))
+ processsubsup(start)
+ process(bot_accent)
+ stop_tagged()
+ end
+ elseif accent then
+ setattr(start,a_tagged,start_tagged("mover", detail_accent, {
+ top = getunicode(accent),
+ topfixed = subtype == math_fixed_top or subtype == math_fixed_both,
+ }))
+ processsubsup(start)
+ process(accent)
+ stop_tagged()
+ else
+ processsubsup(start)
+ end
+ elseif id == glue_code then
+ -- local spec = getfield(start,"spec")
+ -- setattr(start,a_tagged,start_tagged("mspace",nil,spec and { width = getfield(spec,"width") }))
+ setattr(start,a_tagged,start_tagged("mspace"))
stop_tagged()
else
- processsubsup(start)
+ setattr(start,a_tagged,start_tagged("merror", { detail = nodecodes[id] }))
+ stop_tagged()
end
- elseif id == glue_code then
- setattr(start,a_tagged,start_tagged("mspace"))
- stop_tagged()
- else
- setattr(start,a_tagged,start_tagged("merror", { detail = nodecodes[i] }))
- stop_tagged()
end
start = getnext(start)
end
+ if mtexttag then
+ stop_tagged()
+ end
end
function noads.handlers.tags(head,style,penalties)
diff --git a/tex/context/base/mult-def.mkiv b/tex/context/base/mult-def.mkiv
index a45f4e440..5761e6cfb 100644
--- a/tex/context/base/mult-def.mkiv
+++ b/tex/context/base/mult-def.mkiv
@@ -34,6 +34,8 @@
% start todo:
+\def\c!svgstyle {svgstyle}
+
\def\c!nextleft {nextleft}
\def\c!nextright {nextright}
\def\c!nextleftquotation {nextleftquotation}
@@ -92,6 +94,10 @@
\def\v!mixed {mixed}
\def\v!centerlast {centerlast}
\def\v!long {long}
+\def\v!box {box}
+
+\def\v!vfenced {vfenced}
+\def\v!bothtext {bothtext}
\def\s!lcgreek {lcgreek}
\def\s!ucgreek {ucgreek}
diff --git a/tex/context/base/node-ltp.lua b/tex/context/base/node-ltp.lua
index 6ad5de140..5a826cc0d 100644
--- a/tex/context/base/node-ltp.lua
+++ b/tex/context/base/node-ltp.lua
@@ -2547,10 +2547,10 @@ function diagnostics.feasible_break(par, current, r, b, pi, d, artificial_demeri
par.font_in_short_display = short_display("log",getnext(printed_node),par.font_in_short_display)
else
local save_link = getnext(current)
- setfield(cur_p,"next",nil)
+ setfield(current,"next",nil)
write_nl("log","")
par.font_in_short_display = short_display("log",getnext(printed_node),par.font_in_short_display)
- setfield(cur_p,"next",save_link)
+ setfield(current,"next",save_link)
end
par.printed_node = current
end
@@ -3145,7 +3145,7 @@ local function hpack(head,width,method,direction,firstline,line) -- fast version
end
diagnostics.overfull_hbox(hlist,line,-delta)
end
- elseif order == 0 and hlist.list and last_badness > tex.hbadness then
+ elseif order == 0 and getlist(hlist) and last_badness > tex.hbadness then
diagnostics.bad_hbox(hlist,line,last_badness)
end
end
diff --git a/tex/context/base/spac-ver.lua b/tex/context/base/spac-ver.lua
index 51d8f674e..52c1e4845 100644
--- a/tex/context/base/spac-ver.lua
+++ b/tex/context/base/spac-ver.lua
@@ -134,11 +134,13 @@ local default = {
strut = true,
hfraction = 1,
dfraction = 1,
+ bfraction = 0.25,
}
local fractions = {
minheight = "hfraction", maxheight = "hfraction",
mindepth = "dfraction", maxdepth = "dfraction",
+ box = "bfraction",
top = "tlines", bottom = "blines",
}
@@ -332,12 +334,33 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
end
local h = height or getfield(current,"height")
local d = depth or getfield(current,"depth")
- local hr, dr, ch, cd = method.hfraction or 1, method.dfraction or 1, h, d
+ local hr, dr, ch, cd, br = method.hfraction or 1, method.dfraction or 1, h, d, method.bfraction or 0
local tlines, blines = method.tlines or 1, method.blines or 1
local done, plusht, plusdp = false, snapht, snapdp
local snaphtdp = snapht + snapdp
- if method.none then
+ if method.box then
+ local br = 1 - br
+ if br < 0 then
+ br = 0
+ elseif br > 1 then
+ br = 1
+ end
+ local n = ceiled((h+d-br*snapht-br*snapdp)/snaphtdp)
+ local x = n * snaphtdp - h - d
+ plusht = h + x / 2
+ plusdp = d + x / 2
+ elseif method.max then
+ local n = ceiled((h+d)/snaphtdp)
+ local x = n * snaphtdp - h - d
+ plusht = h + x / 2
+ plusdp = d + x / 2
+ elseif method.min then
+ local n = floored((h+d)/snaphtdp)
+ local x = n * snaphtdp - h - d
+ plusht = h + x / 2
+ plusdp = d + x / 2
+ elseif method.none then
plusht, plusdp = 0, 0
if t then
t[#t+1] = "none: plusht 0pt plusdp 0pt"
diff --git a/tex/context/base/spac-ver.mkiv b/tex/context/base/spac-ver.mkiv
index 409dd985c..7257b4ef9 100644
--- a/tex/context/base/spac-ver.mkiv
+++ b/tex/context/base/spac-ver.mkiv
@@ -1412,6 +1412,9 @@
\definegridsnapping[\v!none] [\v!none]
\definegridsnapping[\v!line] [\v!line]
\definegridsnapping[\v!strut] [\v!strut]
+\definegridsnapping[\v!box] [\v!box] % centers a box rounded upwards (box:.5 -> tolerance)
+\definegridsnapping[\v!min] [\v!min] % centers a box rounded downwards
+\definegridsnapping[\v!max] [\v!max] % centers a box rounded upwards
\definegridsnapping[\v!max] [\v!maxdepth,\v!maxheight,\v!strut]
\definegridsnapping[\v!min] [\v!mindepth,\v!minheight,\v!strut]
diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf
index 9dc680772..6898e14f2 100644
--- a/tex/context/base/status-files.pdf
+++ b/tex/context/base/status-files.pdf
Binary files differ
diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf
index 3936e2e7c..aa73bb159 100644
--- a/tex/context/base/status-lua.pdf
+++ b/tex/context/base/status-lua.pdf
Binary files differ
diff --git a/tex/context/base/status-mkiv.lua b/tex/context/base/status-mkiv.lua
index 05bfd7c93..157aa0e7e 100644
--- a/tex/context/base/status-mkiv.lua
+++ b/tex/context/base/status-mkiv.lua
@@ -542,6 +542,12 @@ return {
},
{
category = "mkiv",
+ filename = "lang-hyp",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
filename = "unic-ini",
loading = "always",
status = "okay",
@@ -3502,6 +3508,12 @@ return {
},
{
category = "lua",
+ filename = "lang-hyp",
+ loading = "lang-hyp",
+ status = "okay",
+ },
+ {
+ category = "lua",
filename = "lang-txt",
loading = "lang-lab",
status = "okay",
diff --git a/tex/context/base/strc-tag.lua b/tex/context/base/strc-tag.lua
index f51c9d5d0..ad6117728 100644
--- a/tex/context/base/strc-tag.lua
+++ b/tex/context/base/strc-tag.lua
@@ -160,6 +160,11 @@ local properties = allocate {
mfenced = { pdf = "Span", nature = "display" },
maction = { pdf = "Span", nature = "display" },
+ mstacker = { pdf = "Span", nature = "display" }, -- these are only internally used
+ mstackertop = { pdf = "Span", nature = "display" }, -- these are only internally used
+ mstackerbot = { pdf = "Span", nature = "display" }, -- these are only internally used
+ mstackermid = { pdf = "Span", nature = "display" }, -- these are only internally used
+
mtable = { pdf = "Table", nature = "display" }, -- might change
mtr = { pdf = "TR", nature = "display" }, -- might change
mtd = { pdf = "TD", nature = "display" }, -- might change
@@ -241,7 +246,7 @@ end
local nstack = 0
-function tags.start(tag,specification)
+function tags.start(tag,specification,props)
local label, detail, user
if specification then
label = specification.label
@@ -282,6 +287,9 @@ function tags.start(tag,specification)
tagmetadata[completetag] = metadata
metadata = nil
end
+ if props then
+ properties[completetag] = props
+ end
texattribute[a_tagged] = t
return t
end
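The new third argument of tags.start stores per-instance properties, so a caller can attach extra data to one specific tag without touching the defaults registered for the tag name. A reduced sketch of that behaviour (not the real strc-tag.lua: the instance key format and all values are made up):

local properties = {
    mover = { pdf = "Span", nature = "display" }, -- default for the tag name
}

local function start(tag,specification,props)
    local completetag = tag .. ">12" -- instance-specific key, hardwired here
    if props then
        properties[completetag] = props -- the behaviour added above
    end
    return completetag
end

local t = start("mover",{ detail = "accent" },{ top = 0x0302, topfixed = false })
print(t,properties[t].top) -- mover>12   770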
diff --git a/tex/context/base/strc-tag.mkiv b/tex/context/base/strc-tag.mkiv
index 39dba8259..9e850c85e 100644
--- a/tex/context/base/strc-tag.mkiv
+++ b/tex/context/base/strc-tag.mkiv
@@ -87,6 +87,10 @@
\def\t!mathtablerow {mtr} % TR
\def\t!mathtablecell {mtd} % TD
\def\t!mathaction {maction} %
+\def\t!mathstacker {mstacker}
+\def\t!mathstackertop {mstackertop}
+\def\t!mathstackermid {mstackermid}
+\def\t!mathstackerbot {mstackerbot}
\def\t!list {list} % TOC
\def\t!listitem {listitem} % TOCI
diff --git a/tex/context/base/typo-mar.lua b/tex/context/base/typo-mar.lua
index 5096e0042..8ec499ee4 100644
--- a/tex/context/base/typo-mar.lua
+++ b/tex/context/base/typo-mar.lua
@@ -165,9 +165,6 @@ local localpar_code = whatsitcodes.localpar
local nodepool = nuts.pool
local new_kern = nodepool.kern
-local new_glue = nodepool.glue
-local new_penalty = nodepool.penalty
-local new_stretch = nodepool.stretch
local new_usernumber = nodepool.usernumber
local new_latelua = nodepool.latelua
@@ -892,9 +889,9 @@ function margins.finalhandler(head)
-- if trace_margindata then
-- report_margindata("flushing stage two, instore: %s, delayed: %s",nofstored,nofdelayed)
-- end
-head = tonut(head)
-local head, done = finalhandler(head)
-head = tonode(head)
+ head = tonut(head)
+ local head, done = finalhandler(head)
+ head = tonode(head)
return head, done
else
return head, false
diff --git a/tex/context/base/x-math-svg.lua b/tex/context/base/x-math-svg.lua
new file mode 100644
index 000000000..b96c2c63e
--- /dev/null
+++ b/tex/context/base/x-math-svg.lua
@@ -0,0 +1,162 @@
+if not modules then modules = { } end modules ['x-math-svg'] = {
+ version = 1.001,
+ comment = "companion to x-math-svg.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local tostring, type, next = tostring, type, next
+local lpegmatch, P, Cs = lpeg.match, lpeg.P, lpeg.Cs
+
+local xmlfirst = xml.first
+local xmlconvert = xml.convert
+local xmlload = xml.load
+local xmlsave = xml.save
+local xmlcollected = xml.collected
+local xmldelete = xml.delete
+
+local loadtable = table.load
+local savetable = table.save
+
+local replacesuffix = file.replacesuffix
+local addsuffix = file.addsuffix
+local removefile = os.remove
+local isfile = lfs.isfile
+
+local formatters = string.formatters
+
+moduledata = moduledata or table.setmetatableindex("table")
+local svgmath = moduledata.svgmath -- autodefined
+
+local namedata = { }
+local pagedata = { }
+
+local statusname = "x-math-svg-status.lua"
+local pdfname = "x-math-svg.pdf"
+
+local pdftosvg = os.which("mudraw")
+
+local f_make_tex = formatters[ [[context --global kpse:x-math-svg.mkvi --inputfile="%s" --svgstyle="%s" --batch --noconsole --once --purgeall]] ]
+local f_make_svg = formatters[ [[mudraw -o "math-%%d.svg" "%s" 1-9999]] ]
+
+local f_inline = formatters[ [[<div class='math-inline' style='vertical-align:%p'></div>]] ]
+local f_display = formatters[ [[<div class='math-display'></div>]] ]
+
+local f_math_tmp = formatters[ [[math-%i]] ]
+
+function svgmath.process(filename)
+ if not filename then
+ -- no filename given
+ return
+ elseif not isfile(filename) then
+ -- invalid filename
+ return
+ end
+ local index = 0
+ local page = 0
+ local blobs = { }
+ local root = xmlload(filename)
+ for mth in xmlcollected(root,"math") do
+ index = index + 1
+ local blob = tostring(mth)
+ if blobs[blob] then
+ context.ReuseSVGMath(index,blobs[blob])
+ else
+ page = page + 1
+ buffers.assign(f_math_tmp(page),blob)
+ context.MakeSVGMath(index,page,mth.at.display)
+ blobs[blob] = page
+ end
+ end
+ context(function()
+ savetable(statusname, {
+ pagedata = pagedata,
+ namedata = namedata,
+ })
+ end)
+end
+
+function svgmath.register(index,page,specification)
+ if specification then
+ pagedata[page] = specification
+ end
+ namedata[index] = page
+end
+
+function svgmath.convert(filename,svgstyle)
+ if not filename then
+ -- no filename given
+ return false, "no filename"
+ elseif not isfile(filename) then
+ -- invalid filename
+ return false, "invalid filename"
+ elseif not pdftosvg then
+ return false, "mudraw is not installed"
+ end
+
+ os.execute(f_make_tex(filename,svgstyle))
+
+ local data = loadtable(statusname)
+ if not data then
+ -- invalid tex run
+ return false, "invalid tex run"
+ elseif not next(data) then
+ return false, "no converson needed"
+ end
+
+ local pagedata = data.pagedata
+ local namedata = data.namedata
+
+ os.execute(f_make_svg(pdfname))
+
+ local root = xmlload(filename)
+ local index = 0
+ local done = { }
+ local unique = 0
+
+ local between = (1-P("<"))^1/""
+ local strip = Cs((
+ (P("<text") * ((1-P("</text>"))^1) * P("</text>")) * between^0 / "" +
+ P(">") * between +
+ P(1)
+ )^1)
+
+ for mth in xmlcollected(root,"m:math") do
+ index = index + 1
+ local page = namedata[index]
+ if done[page] then
+ mth.__p__.dt[mth.ni] = done[page]
+ else
+ local info = pagedata[page]
+ local depth = info.depth
+ local mode = info.mode
+ local svgname = addsuffix(f_math_tmp(page),"svg")
+ local action = mode == "inline" and f_inline or f_display
+ local x_div = xmlfirst(xmlconvert(action(-depth)),"/div")
+ local svgdata = io.loaddata(svgname)
+ if not svgdata or svgdata == "" then
+ print("error in:",svgname,tostring(mth))
+ else
+ -- svgdata = string.gsub(svgdata,">%s<","")
+ svgdata = lpegmatch(strip,svgdata)
+ local x_svg = xmlfirst(xmlconvert(svgdata),"/svg")
+ -- xmldelete(x_svg,"text")
+ x_div.dt = { x_svg }
+ mth.__p__.dt[mth.ni] = x_div -- use helper
+ end
+ done[page] = x_div
+ unique = unique + 1
+ end
+ end
+
+-- for k, v in next, data do
+-- removefile(addsuffix(k,"svg"))
+-- end
+-- removefile(statusname)
+-- removefile(pdfname)
+
+ xmlsave(root,filename)
+
+ return true, index, unique
+end
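For reference, a hedged usage sketch of the new module: given an exported (X)HTML file that still contains MathML, convert replaces every m:math element by an SVG rendered through ConTeXt and mudraw. The file name and svgstyle value are examples, and this assumes the module has been loaded so that moduledata.svgmath is populated.

local ok, a, b = moduledata.svgmath.convert("mydocument-export.xhtml","mathematics")
if ok then
    print(("replaced %s math elements, %s unique renderings"):format(a,b))
else
    print("conversion failed: " .. a) -- a holds the reason string on failure
end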
diff --git a/tex/context/base/x-mathml.mkiv b/tex/context/base/x-mathml.mkiv
index 14cf583d1..093304e7d 100644
--- a/tex/context/base/x-mathml.mkiv
+++ b/tex/context/base/x-mathml.mkiv
@@ -1,4 +1,4 @@
-%D \modul
+%D \module
%D [ file=x-mathml,
%D version=2008.05.29,
%D title=\CONTEXT\ XML Modules,
@@ -23,6 +23,10 @@
%
% todo: more will be moved to lua (less hassle)
% todo: move left/right to the lua end
+%
+% this implementation looks like a hack ... this is because we deal with all the weird cases we
+% ran into, including abuse that was supposed to render ok (even if it didn't in other
+% renderers) ... it was simply expected to work that way.
\writestatus{loading}{ConTeXt XML Macros / MathML Renderer}
@@ -2231,6 +2235,8 @@
}
\stopxmlsetups
+% helpers
+
\unexpanded\def\mmlexecuteifdefined#1%
{\ifx#1\empty
\expandafter\secondoftwoarguments
@@ -2241,142 +2247,156 @@
\fi\fi
{\csname#1\endcsname}}
-% todo: combine topaccent/over/bottomaccent/under check
-
-\definemathextensible [\v!mathematics] [mml:overleftarrow] ["2190] % ["27F5]
-\definemathextensible [\v!mathematics] [mml:overrightarrow] ["2192] % ["27F6]
-\definemathextensible [\v!mathematics] [mml:overleftrightarrow] ["27F7]
-\definemathextensible [\v!mathematics] [mml:overtwoheadrightarrow] ["27F9]
-\definemathextensible [\v!mathematics] [mml:overleftharpoondown] ["21BD]
-\definemathextensible [\v!mathematics] [mml:overleftharpoonup] ["21BC]
-\definemathextensible [\v!mathematics] [mml:overrightharpoondown] ["21C1]
-\definemathextensible [\v!mathematics] [mml:overrightharpoonup] ["21C0]
-
-\definemathextensible [\v!mathematics] [mml:underleftarrow] ["2190] % ["27F5]
-\definemathextensible [\v!mathematics] [mml:underrightarrow] ["2192] % ["27F6]
-\definemathextensible [\v!mathematics] [mml:underleftrightarrow] ["27F7]
-\definemathextensible [\v!mathematics] [mml:undertwoheadrightarrow] ["27F9]
-\definemathextensible [\v!mathematics] [mml:underleftharpoondown] ["21BD]
-\definemathextensible [\v!mathematics] [mml:underleftharpoonup] ["21BC]
-\definemathextensible [\v!mathematics] [mml:underrightharpoondown] ["21C1]
-\definemathextensible [\v!mathematics] [mml:underrightharpoonup] ["21C0]
-
-\definemathtriplet [\v!mathematics] [mmlovertriplet]
-\definemathtriplet [\v!mathematics] [mmlundertriplet]
-\definemathtriplet [\v!mathematics] [mmldoubletriplet]
-
-% alternative:
-%
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x2190}] ["2190] % ["27F5]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x2192}] ["2192] % ["27F6]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x27F5}] ["2190] % ["27F5]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x27F6}] ["2192] % ["27F6]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x27F7}] ["27F7]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x27F9}] ["27F9]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x21BD}] ["21BD]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x21BC}] ["21BC]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x21C1}] ["21C1]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x21C0}] ["21C0]
-
-\unexpanded\def\mmloverof#1{\mmlexecuteifdefined\mmlovercommand\relax{\mmlunexpandedfirst {#1}}\relax}
-\unexpanded\def\mmloveros#1{\mmlexecuteifdefined\mmlovercommand {\mmlunexpandedsecond{#1}}\relax}
-\unexpanded\def\mmloverbf#1{\mmlexecuteifdefined\mmlbasecommand {\mmlunexpandedfirst {#1}}\relax}
-\unexpanded\def\mmloverbs#1{\mmlexecuteifdefined\mmlbasecommand\relax{\mmlunexpandedsecond{#1}}\relax}
+\def\mmlextensible#1{\ctxmodulemathml{extensible(\!!bs#1\!!es)}}
-\startxmlsetups mml:mover
- \edef\mmlovertoken{\mmlextensible{\xmlraw{#1}{/mml:*[2]}}}% /text()
- \doifelseutfmathabove\mmlovertoken {
- \edef\mmlovercommand{\utfmathcommandabove\mmlovertoken}
- \mmloverof{#1}
- } {
- \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]}}% /text()
- \doifelseutfmathabove\mmlbasetoken {
- \edef\mmlbasecommand{mml:\utfmathcommandabove\mmlbasetoken}
- \mmloverbs{#1}
- } {
- \edef\mmlbasecommand{\utfmathfiller\mmlbasetoken}
- \edef\mmlovercommand{\utfmathfiller\mmlovertoken}
- \mmlundertriplet{\mmloverbf{#1}}{\mmloveros{#1}}{}%\relax
- }
- }
- % \limits % spoils spacing
-\stopxmlsetups
+\definemathtriplet [\v!mathematics] [mmlovertriplet] % or will we use a special instance
+\definemathtriplet [\v!mathematics] [mmlundertriplet] % or will we use a special instance
+\definemathtriplet [\v!mathematics] [mmldoubletriplet] % or will we use a special instance
-% alternative:
-%
-% \startxmlsetups mml:mover
-% \edef\mmlovertoken{\xmlraw{#1}{/mml:*[2]}}% /text()
-% \doifelseutfmathabove\mmlovertoken {
-% \edef\mmlovercommand{\utfmathcommandabove\mmlovertoken}
-% \mmloverof{#1}
-% } {
-% \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]/text()}}
-% \ifcsname mml:\mmlbasetoken\endcsname
-% \csname mml:\mmlbasetoken\endcsname{\mmlunexpandedsecond{#1}}\relax
-% \else
-% \edef\mmlbasecommand{\utfmathfiller\mmlbasetoken}
-% \edef\mmlovercommand{\utfmathfiller\mmlovertoken}
-% \mmlovertriplet{\mmloveros{#1}}{\mmloverbf{#1}}\relax
-% \fi
-% }
-% % \limits % spoils spacing
-% \stopxmlsetups
+% common to munder/mover/munderover
-% do this in lua
+\starttexdefinition unexpanded mmlfencedfirst #1
+ \math_fences_checked_start
+ \mmlunexpandedfirst{#1}
+ \math_fences_checked_stop
+\stoptexdefinition
+\starttexdefinition unexpanded mmlfencedsecond #1
+ \math_fences_checked_start
+ \mmlunexpandedsecond{#1}
+ \math_fences_checked_stop
+\stoptexdefinition
+\starttexdefinition unexpanded mmlfencedthird #1
+ \math_fences_checked_start
+ \mmlunexpandedthird{#1}
+ \math_fences_checked_stop
+\stoptexdefinition
-\def\mmlextensible#1{\ctxmodulemathml{extensible(\!!bs#1\!!es)}}
+% mover
-% \unexpanded\def\mmlunderuf#1{\mmlexecuteifdefined\mmlundercommand\relax {\mmlunexpandedfirst {#1}}\relax}
-% \unexpanded\def\mmlunderus#1{\mmlexecuteifdefined\mmlundercommand {\mmlunexpandedsecond{#1}}\relax}
-% \unexpanded\def\mmlunderbf#1{\mmlexecuteifdefined\mmlbasecommand {\mmlunexpandedfirst {#1}}\relax}
-% %unexpanded\def\mmlunderbs#1{\mmlexecuteifdefined\mmlbasecommand \relax{}{\mmlunexpandedsecond{#1}}\relax}
-% \unexpanded\def\mmlunderbs#1{\mmlexecuteifdefined\mmlbasecommand \relax {\mmlunexpandedsecond{#1}}\relax}
+\starttexdefinition unexpanded mmloverabove #1
+ \edef\mmlovercommand{\utfmathfiller\mmlovertoken}
+ \mmlexecuteifdefined\mmlovercommand {\mmlfencedsecond{#1}} \relax
+\stoptexdefinition
+\starttexdefinition unexpanded mmloverbase #1
+ \edef\mmlbasecommand{\utfmathfiller\mmlbasetoken}
+ \mmlexecuteifdefined\mmlbasecommand {\mmlfencedfirst{#1}}
+ \relax
+\stoptexdefinition
+\starttexdefinition unexpanded mmloverbasefiller #1
+ \edef\mmlbasecommand{e\utfmathcommandfiller\mmlbasetoken}
+ \mmlexecuteifdefined\mmlbasecommand \relax {\mmlfencedsecond{#1}} {}
+\stoptexdefinition
+\starttexdefinition unexpanded mmloveraccent #1
+ \edef\mmlovercommand{\utfmathcommandabove\mmlovertoken}
+ \mmlexecuteifdefined\mmlovercommand \relax {\mmlfencedfirst{#1}}
+\stoptexdefinition
+\starttexdefinition unexpanded mmlovertext #1
+ \mmlovertriplet {\mmloverbase{#1}} {\mmloverabove{#1}} {}
+\stoptexdefinition
+\starttexdefinition unexpanded mmloveraccentchecker #1
+ \edef\mmlovertoken{\mmlextensible{\xmlraw{#1}{/mml:*[2]}}}% /text()
+ \doifelseutfmathabove\mmlovertoken \mmloveraccent \mmlovertext {#1}
+\stoptexdefinition
-% \MMLhack
+\startxmlsetups mml:mover
+ \edef\mmlbasetoken{\mmlextensible{\xmlraw{#1}{/mml:*[1]}}}% /text()
+ \doifelseutfmathfiller\mmlbasetoken \mmloverbasefiller \mmloveraccentchecker {#1}
+\stopxmlsetups
-\unexpanded\def\mmlunderuf#1{\mmlexecuteifdefined\mmlundercommand\relax {\math_fences_checked_start\mmlunexpandedfirst {#1}\math_fences_checked_stop}\relax}
-\unexpanded\def\mmlunderus#1{\mmlexecuteifdefined\mmlundercommand {\math_fences_checked_start\mmlunexpandedsecond{#1}\math_fences_checked_stop}\relax}
-\unexpanded\def\mmlunderbf#1{\mmlexecuteifdefined\mmlbasecommand {\math_fences_checked_start\mmlunexpandedfirst {#1}\math_fences_checked_stop}\relax}
-%unexpanded\def\mmlunderbs#1{\mmlexecuteifdefined\mmlbasecommand \relax{}{\math_fences_checked_start\mmlunexpandedsecond{#1}\math_fences_checked_stop}\relax}
-\unexpanded\def\mmlunderbs#1{\mmlexecuteifdefined\mmlbasecommand \relax {\math_fences_checked_start\mmlunexpandedsecond{#1}\math_fences_checked_stop}\relax}
+% munder
-\startxmlsetups mml:munder
+\starttexdefinition unexpanded mmlunderbelow #1
+ \edef\mmlundercommand{\utfmathfiller\mmlundertoken}
+ \mmlexecuteifdefined\mmlundercommand {\mmlfencedsecond{#1}} \relax
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderbase #1
+ \edef\mmlbasecommand{\utfmathfiller\mmlbasetoken}
+ \mmlexecuteifdefined\mmlbasecommand {\mmlfencedfirst{#1}}
+ \relax
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderbasefiller #1
+ \edef\mmlbasecommand{e\utfmathcommandfiller\mmlbasetoken}%
+ \mmlexecuteifdefined\mmlbasecommand \relax {} {\mmlfencedsecond{#1}}
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderaccent #1
+ \edef\mmlundercommand{\utfmathcommandbelow\mmlundertoken}
+ \mmlexecuteifdefined\mmlundercommand \relax {\mmlfencedfirst{#1}}
+\stoptexdefinition
+\starttexdefinition unexpanded mmlundertext #1
+ \mmlundertriplet {\mmlunderbase{#1}} {} {\mmlunderbelow{#1}}
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderaccentchecker #1
\edef\mmlundertoken{\mmlextensible{\xmlraw{#1}{/mml:*[2]}}}% /text()
- \doifelseutfmathbelow\mmlundertoken {%
- \edef\mmlundercommand{\utfmathcommandbelow\mmlundertoken}
- \mmlunderuf{#1}
- } {
- \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]}}% /text()
- \doifelseutfmathbelow\mmlbasetoken {
- \edef\mmlbasecommand{mml:\utfmathcommandbelow\mmlbasetoken}
- \mmlunderbs{#1}
- } {
- \edef\mmlbasecommand {\utfmathfiller\mmlbasetoken}
- \edef\mmlundercommand{\utfmathfiller\mmlundertoken}
- \mmlundertriplet{\mmlunderbf{#1}}{}{\mmlunderus{#1}}%\relax
- }
- }
- % \limits % spoils spacing
+ \doifelseutfmathbelow\mmlundertoken \mmlunderaccent \mmlundertext {#1}
+\stoptexdefinition
+
+\startxmlsetups mml:munder
+ \edef\mmlbasetoken{\mmlextensible{\xmlraw{#1}{/mml:*[1]}}}% /text()
+ \doifelseutfmathfiller\mmlbasetoken \mmlunderbasefiller \mmlunderaccentchecker {#1}
\stopxmlsetups
-\unexpanded\def\mmlunderoverst#1{\mmlexecuteifdefined\mmlbasecommand \relax{\mmlunexpandedsecond{#1}}{\mmlunexpandedthird{#1}}\relax}
-\unexpanded\def\mmlunderoverbf#1{\mmlexecuteifdefined\mmlbasecommand {\mmlunexpandedfirst {#1}}\relax}
-\unexpanded\def\mmlunderoverus#1{\mmlexecuteifdefined\mmlundercommand {\mmlunexpandedsecond{#1}}\relax}
-\unexpanded\def\mmlunderoverot#1{\mmlexecuteifdefined\mmlovercommand {\mmlunexpandedthird {#1}}\relax}
+% munderover
-\startxmlsetups mml:munderover
- \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]}}% /text()
- \doifelseutfmathbelow\mmlbasetoken {
- \edef\mmlbasecommand{mml:\utfmathcommandbelow\mmlbasetoken}
- \mmlunderoverst{#1}
+\starttexdefinition unexpanded mmlunderoveraccentcheckerUO #1
+ \edef\mmlundercommand{\utfmathcommandbelow\mmlundertoken}
+ \edef\mmlovercommand {\utfmathcommandabove\mmlovertoken}
+ \edef\mmlbasecommand {\mmlovercommand\mmlundercommand}
+ \ifcsname\mmlbasecommand\endcsname
+ \csname\mmlbasecommand\endcsname {\mmlfencedfirst{#1}}
+ \else\ifcsname\mmlundercommand\endcsname
+ \ifcsname\mmlovercommand\endcsname
+ \csname\mmlovercommand\endcsname {\csname\mmlundercommand\endcsname{\mmlfencedfirst{#1}}}
+ \else
+ \mmldoubletriplet {\csname\mmlundercommand\endcsname{\mmlfencedfirst{#1}}} {\mmlfencedthird{#1}\mmlfencedthird{#1}} {}
+ \fi
+ \else\ifcsname\mmlovercommand\endcsname
+ \mmldoubletriplet {\csname\mmlovercommand\endcsname{\mmlfencedfirst{#1}}} {} {\mmlfencedsecond{#1}}
+ \else
+ \mmlunderoveraccentcheckerTT {#1}
+ \fi\fi\fi
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderoveraccentcheckerUT #1
+ \edef\mmlundercommand{\utfmathcommandbelow\mmlundertoken}
+ \edef\mmlbasecommand {\mmlundercommand text}
+ \ifcsname\mmlbasecommand\endcsname
+ \csname\mmlbasecommand\endcsname {\mmlfencedfirst{#1}} {\mmlfencedthird{#1}}
+ \else\ifcsname\mmlundercommand\endcsname
+ \mmldoubletriplet {\csname\mmlundercommand\endcsname{\mmlfencedfirst{#1}}} {\mmlfencedthird{#1}} {}
+ \else
+ \mmlunderoveraccentcheckerTT {#1}
+ \fi\fi
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderoveraccentcheckerOT #1
+ \edef\mmlovercommand{\utfmathcommandabove\mmlovertoken}
+ \edef\mmlbasecommand{\mmlovercommand text}
+ \ifcsname\mmlbasecommand\endcsname
+ \csname\mmlbasecommand\endcsname {\mmlfencedfirst{#1}} {\mmlfencedsecond{#1}}
+ \else\ifcsname\mmlovercommand\endcsname
+ \mmldoubletriplet {\csname\mmlovercommand\endcsname{\mmlfencedfirst{#1}}} {} {\mmlfencedsecond{#1}}
+ \else
+ \mmlunderoveraccentcheckerTT {#1}
+ \fi\fi
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderoveraccentcheckerTT #1
+ \mmldoubletriplet {\mmlfencedfirst{#1}} {\mmlfencedthird{#1}} {\mmlfencedsecond{#1}} \relax
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderoveraccentchecker #1
+ \edef\mmlundertoken{\mmlextensible{\xmlraw{#1}{/mml:*[2]}}}% /text()
+ \edef\mmlovertoken {\mmlextensible{\xmlraw{#1}{/mml:*[3]}}}% /text()
+ \doifelseutfmathbelow\mmlundertoken {
+ \doifelseutfmathabove\mmlovertoken \mmlunderoveraccentcheckerUO \mmlunderoveraccentcheckerUT {#1}
} {
- \edef\mmlundertoken {\xmlraw{#1}{/mml:*[2]}}% /text()
- \edef\mmlovertoken {\xmlraw{#1}{/mml:*[3]}}% /text()
- \edef\mmlbasecommand {\utfmathfiller\mmlbasetoken}
- \edef\mmlundercommand{\utfmathfiller\mmlundertoken}
- \edef\mmlovercommand {\utfmathfiller\mmlovertoken}
- \mmldoubletriplet{\mmlunderoverbf{#1}}{\mmlunderoverot{#1}}{\mmlunderoverus{#1}}\relax
+ \doifelseutfmathabove\mmlovertoken \mmlunderoveraccentcheckerOT \mmlunderoveraccentcheckerTT {#1}
}
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderoverbasefiller #1
+ \edef\mmlbasecommand{e\utfmathcommandfiller\mmlbasetoken}%
+ \mmlexecuteifdefined\mmlbasecommand \relax {\mmlfencedthird{#1}} {\mmlfencedsecond{#1}}
+\stoptexdefinition
+
+\startxmlsetups mml:munderover
+ \edef\mmlbasetoken{\mmlextensible{\xmlraw{#1}{/mml:*[1]}}}% /text()
+ \doifelseutfmathfiller\mmlbasetoken \mmlunderoverbasefiller \mmlunderoveraccentchecker {#1}
\stopxmlsetups
% tables (mml:mtable, mml:mtr, mml:mlabeledtr, mml:mtd)
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index d58fa5f1c..4aa03e6a4 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 09/18/14 11:17:09
+-- merge date : 09/26/14 11:42:21
do -- begin closure to overcome local limits and interference
@@ -6706,7 +6706,7 @@ local report_otf=logs.reporter("fonts","otf loading")
local fonts=fonts
local otf=fonts.handlers.otf
otf.glists={ "gsub","gpos" }
-otf.version=2.759
+otf.version=2.760
otf.cache=containers.define("fonts","otf",otf.version,true)
local fontdata=fonts.hashes.identifiers
local chardata=characters and characters.data
@@ -6832,7 +6832,6 @@ local valid_fields=table.tohash {
"extrema_bound",
"familyname",
"fontname",
- "fontname",
"fontstyle_id",
"fontstyle_name",
"fullname",
@@ -7067,6 +7066,7 @@ function otf.load(filename,sub,featurefile)
},
lookuptypes={},
},
+ warnings={},
metadata={
},
properties={
@@ -8194,6 +8194,10 @@ actions["check glyphs"]=function(data,filename,raw)
description.glyph=nil
end
end
+local valid=(lpeg.R("\x00\x7E")-lpeg.S("(){}[]<>%/ \n\r\f\v"))^0*lpeg.P(-1)
+local function valid_ps_name(str)
+ return str and str~="" and #str<64 and lpegmatch(valid,str) and true or false
+end
actions["check metadata"]=function(data,filename,raw)
local metadata=data.metadata
for _,k in next,mainfields do
@@ -8211,9 +8215,36 @@ actions["check metadata"]=function(data,filename,raw)
end
end
if metadata.validation_state and table.contains(metadata.validation_state,"bad_ps_fontname") then
- local name=file.nameonly(filename)
- metadata.fontname="bad-fontname-"..name
- metadata.fullname="bad-fullname-"..name
+ local function valid(what)
+ local names=raw.names
+ for i=1,#names do
+ local list=names[i]
+ local names=list.names
+ if names then
+ local name=names[what]
+ if name and valid_ps_name(name) then
+ return name
+ end
+ end
+ end
+ end
+ local function check(what)
+ local oldname=metadata[what]
+ if valid_ps_name(oldname) then
+ report_otf("ignoring warning %a because %s %a is proper ASCII","bad_ps_fontname",what,oldname)
+ else
+ local newname=valid(what)
+ if not newname then
+ newname=formatters["bad-%s-%s"](what,file.nameonly(filename))
+ end
+ local warning=formatters["overloading %s from invalid ASCII name %a to %a"](what,oldname,newname)
+ data.warnings[#data.warnings+1]=warning
+ report_otf(warning)
+ metadata[what]=newname
+ end
+ end
+ check("fontname")
+ check("fullname")
end
end
actions["cleanup tables"]=function(data,filename,raw)
@@ -8334,6 +8365,7 @@ end
local function copytotfm(data,cache_id)
if data then
local metadata=data.metadata
+ local warnings=data.warnings
local resources=data.resources
local properties=derivetable(data.properties)
local descriptions=derivetable(data.descriptions)
@@ -8408,6 +8440,7 @@ local function copytotfm(data,cache_id)
local filename=constructors.checkedfilename(resources)
local fontname=metadata.fontname
local fullname=metadata.fullname or fontname
+ local psname=fontname or fullname
local units=metadata.units_per_em or 1000
if units==0 then
units=1000
@@ -8489,8 +8522,16 @@ local function copytotfm(data,cache_id)
properties.filename=filename
properties.fontname=fontname
properties.fullname=fullname
- properties.psname=fontname or fullname
+ properties.psname=psname
properties.name=filename or fullname
+ if warnings and #warnings>0 then
+ report_otf("warnings for font: %s",filename)
+ report_otf()
+ for i=1,#warnings do
+ report_otf(" %s",warnings[i])
+ end
+ report_otf()
+ end
return {
characters=characters,
descriptions=descriptions,
@@ -8499,6 +8540,7 @@ local function copytotfm(data,cache_id)
resources=resources,
properties=properties,
goodies=goodies,
+ warnings=warnings,
}
end
end