summaryrefslogtreecommitdiff
path: root/source
diff options
context:
space:
mode:
Diffstat (limited to 'source')
-rw-r--r--source/luametatex/CMakeLists.txt528
-rw-r--r--source/luametatex/build.txt86
-rw-r--r--source/luametatex/source/libraries/avl/readme.txt44
-rw-r--r--source/luametatex/source/libraries/libcerf/readme-luametatex.txt52
-rw-r--r--source/luametatex/source/libraries/miniz/readme.txt14
-rw-r--r--source/luametatex/source/libraries/pplib/ppapi.h2
-rw-r--r--source/luametatex/source/libraries/pplib/ppcrypt.c7
-rw-r--r--source/luametatex/source/libraries/pplib/ppcrypt.h1
-rw-r--r--source/luametatex/source/libraries/pplib/readme.txt6
-rw-r--r--source/luametatex/source/libraries/readme.txt72
-rw-r--r--source/luametatex/source/license.txt362
-rw-r--r--source/luametatex/source/lua/lmtcallbacklib.c13
-rw-r--r--source/luametatex/source/lua/lmtcallbacklib.h4
-rw-r--r--source/luametatex/source/lua/lmtinterface.c14
-rw-r--r--source/luametatex/source/lua/lmtinterface.h7
-rw-r--r--source/luametatex/source/lua/lmtnodelib.c71
-rw-r--r--source/luametatex/source/lua/lmtstatuslib.c2
-rw-r--r--source/luametatex/source/lua/lmttexlib.c27
-rw-r--r--source/luametatex/source/lua/lmttokenlib.c50
-rw-r--r--source/luametatex/source/luacore/lua54/originals/patches.txt22
-rw-r--r--source/luametatex/source/luacore/lua54/readme.txt16
-rw-r--r--source/luametatex/source/luacore/luapeg/readme.txt16
-rw-r--r--source/luametatex/source/luacore/readme.txt66
-rw-r--r--source/luametatex/source/luametatex.h2
-rw-r--r--source/luametatex/source/luaoptional/cmake/mujs/CMakeLists.txt214
-rw-r--r--source/luametatex/source/luaoptional/readme.txt60
-rw-r--r--source/luametatex/source/mp/mpc/mpmath.c34
-rw-r--r--source/luametatex/source/mp/mpw/mpmath.w34
-rw-r--r--source/luametatex/source/mp/mpw/mpmathdouble.w2
-rw-r--r--source/luametatex/source/mp/readme.txt26
-rw-r--r--source/luametatex/source/readme.txt1126
-rw-r--r--source/luametatex/source/tex/texadjust.c2
-rw-r--r--source/luametatex/source/tex/texalign.c4
-rw-r--r--source/luametatex/source/tex/texbuildpage.c11
-rw-r--r--source/luametatex/source/tex/texcommands.c25
-rw-r--r--source/luametatex/source/tex/texcommands.h19
-rw-r--r--source/luametatex/source/tex/texconditional.c27
-rw-r--r--source/luametatex/source/tex/texconditional.h3
-rw-r--r--source/luametatex/source/tex/texdumpdata.h2
-rw-r--r--source/luametatex/source/tex/texequivalents.c2
-rw-r--r--source/luametatex/source/tex/texequivalents.h18
-rw-r--r--source/luametatex/source/tex/texexpand.c20
-rw-r--r--source/luametatex/source/tex/texinputstack.c7
-rw-r--r--source/luametatex/source/tex/texinserts.c2
-rw-r--r--source/luametatex/source/tex/texlinebreak.c1310
-rw-r--r--source/luametatex/source/tex/texlinebreak.h8
-rw-r--r--source/luametatex/source/tex/texlocalboxes.c4
-rw-r--r--source/luametatex/source/tex/texmaincontrol.c106
-rw-r--r--source/luametatex/source/tex/texmaincontrol.h2
-rw-r--r--source/luametatex/source/tex/texmath.c156
-rw-r--r--source/luametatex/source/tex/texmath.h2
-rw-r--r--source/luametatex/source/tex/texmlist.c54
-rw-r--r--source/luametatex/source/tex/texnodes.c52
-rw-r--r--source/luametatex/source/tex/texnodes.h57
-rw-r--r--source/luametatex/source/tex/texpackaging.c122
-rw-r--r--source/luametatex/source/tex/texprimitive.c6
-rw-r--r--source/luametatex/source/tex/texprinting.c3
-rw-r--r--source/luametatex/source/tex/texrules.c196
-rw-r--r--source/luametatex/source/tex/texrules.h5
-rw-r--r--source/luametatex/source/tex/texscanning.c29
-rw-r--r--source/luametatex/source/tex/texscanning.h3
-rw-r--r--source/luametatex/source/tex/textoken.c68
-rw-r--r--source/luametatex/source/tex/textypes.h1
-rw-r--r--source/luametatex/source/utilities/auxposit.h12
64 files changed, 3002 insertions, 2316 deletions
diff --git a/source/luametatex/CMakeLists.txt b/source/luametatex/CMakeLists.txt
index 2fffa9ecb..e33b76aee 100644
--- a/source/luametatex/CMakeLists.txt
+++ b/source/luametatex/CMakeLists.txt
@@ -1,264 +1,264 @@
-cmake_minimum_required(VERSION 3.9)
-
-project(luametatex VERSION 2.10 LANGUAGES C)
-
-set(CMAKE_C_STANDARD 11)
-# set(CMAKE_CXX_STANDARD 17)
-
-# https://sourceforge.net/p/predef/wiki/OperatingSystems/
-# https://sourceforge.net/p/predef/wiki/Architectures/
-
-include(GNUInstallDirs)
-
-# Optionals (maybe have a LMT_*_TOO for each of them). We might start out with only a very few
-# optionals at some time, but for now we enable them (there is not not much code involved). The
-# idea behind thes eoptionals is that we have very simple (!) interfaces, delegating as much as
-# possible to Lua. We will *not* add interfaces with many bindings because that will introduce
-# dependencies (and looking at e.g. LuaTeX build updates shows that clearly: a no-go).
-
-set(LMT_KPSE_TOO 1) # In case we want to manage MKII scripts (etc) with mtxrun.
-set(LMT_HB_TOO 1) # Maybe handy for Idris' font development (old converted ffi stuff)
-
-# When set, because we're sparse we also strip the binary. Because we only gain some 1-2% on
-# runtime, enabling it makes not much sense:
-
-# set(LMT_OPTIMIZE 1)
-
-if (MSVC)
-
- if (CMAKE_C_COMPILER_ID STREQUAL "Clang")
-
- add_compile_options(
- -Wall
- -O2
-
- -Wcast-align
- -Wcast-qual
-
- -Wno-unknown-pragmas
- -fno-strict-aliasing
-
- -Wno-pedantic
- -Wno-deprecated-declarations
- -Wno-missing-noreturn
- -Wno-shadow
- )
-
- add_definitions(-D_CRT_SECURE_NO_WARNINGS)
-
- add_definitions(-DLMT_COMPILER_USED="clang")
-
- else()
-
- add_compile_options(
- /Wall
-
- /wd4127 # constant conditional expression
- /wd4131 # old style declarator
- /wd4152 # function pointer cast
- /wd4201 # nonstandard extension used: nameless struct/union
- /wd4244 # assignment in conditional expression
- /wd4456 # local vars with same name as outer variable
- /wd4457 # local vars with same function parameter
- /wd4464 # relative include path
- /wd4668 # missing defines
- /wd4702 # unreachable code
- /wd4710 # inlining
- /wd4711 # inlining
- /wd4774 # sprint argument 2 warning
- /wd4777 # format argument 2 warning
- /wd4820 # local vars with same name as outer variable
- /wd4996 # strdup etc warnings
- /wd5045 # spectre
-
- # /GL # whole program link optimization
- # /Gw # whole program data optimization (a little smaller bin)
-
- # /Ob3 # more agressive inline, much larger bin, no gain
-
- /wd4061 # enumerator * in switch * is not explicitly handles (mp)
- /wd4701 # potentially unitialized local variable (lua)
- /wd4255 # no function prototype given
-
- /wd5105 # macro expansion producing 'defined' has undefined behavior
-
- /wd4548 # expression before comma has no effect; expected expression with side-effect
-
- # indeed a bit faster but also a much larger binary:
-
- # /fp:fast
-
- # okay for amd processors too but no difference in size so probably no gain:
-
- # /favor:INTEL64
- # /fsanitize:address
- # /std:c17
-
- )
-
- # We always optimize ... symbols are not in the binary anyway so there is no advantage
- # (like when accessing Lua api functions). We could have an additional luametatex-lua.dll
- # but that also creates a dependency (possible conflict).
-
- # if (DEFINED LMT_OPTIMIZE)
- add_compile_options(
- /GL # whole program link optimization
- /Gw # whole program data optimization (a little smaller bin)
- )
- # endif()
-
- add_definitions(-DLMT_COMPILER_USED="msvc")
-
- endif()
-
- else()
-
- if (CMAKE_C_COMPILER_ID STREQUAL "Clang")
-
- # why not -03
-
- add_compile_options(
- -O2
- )
-
- add_definitions(-DLMT_COMPILER_USED="clang")
-
- else()
-
- add_compile_options(
- -O3
- # -g0
- # -mtune=nocona # fails on arm so more testing needed
- )
-
- add_definitions(-DLMT_COMPILER_USED="gcc")
-
- # add_compile_options(-pg)
- # add_link_options(-pg)
-
- endif()
-
- add_compile_options(
- -Wall
-
- -Wcast-align
- -Wcast-qual
-
- -Wno-unknown-pragmas
- -Wno-unused-result
- -fno-strict-aliasing
- )
-
- # for c17
- #
- # add_definitions(-D__STDC_WANT_LIB_EXT2__=1)
-
- if (DEFINED LMT_OPTIMIZE)
- if (NOT (${CMAKE_SYSTEM_NAME} MATCHES "Darwin"))
- set(CMAKE_EXE_LINKER_FLAGS "-s")
- endif()
- endif()
-
-endif()
-
-if (CMAKE_C_COMPILER_ID STREQUAL "Clang")
-
- add_compile_options(
- -Wno-unknown-warning-option
- -Wno-nonportable-include-path
- -Wno-nonportable-system-include-path
- -Wno-newline-eof
- -Wno-extra-semi-stmt
- -Wno-sign-conversion
- -Wno-unused-macros
- -Wno-reserved-id-macro
- -Wno-comma
- -Wno-switch-enum
- -Wno-shadow
- -Wno-missing-noreturn
- -Wno-implicit-fallthrough
- # -Wno-format
- -Wno-reserved-identifier
- -Wno-date-time
- -Wno-format-nonliteral
- -Wno-float-equal
- )
-
-endif()
-
-# Not that tested (converted ffi originals):
-
-if ((DEFINED LMT_KPSE_TOO))
- add_definitions(-DLMT_KPSE_TOO=1)
-endif()
-if ((DEFINED LMT_HB_TOO))
- add_definitions(-DLMT_HB_TOO=1)
-endif()
-
-# This needs cmake >= 3.9 and produces a 60K smaller mingw binary but it take quite a bit of
-# runtime to get there so it should become an option (apart from testing on all builders).
-
-if (DEFINED LMT_OPTIMIZE)
-
- include(CheckIPOSupported)
- check_ipo_supported(RESULT ipo_supported OUTPUT ipo_message)
-
- if (ipo_supported)
- #
- # We only have one program so we do it global (can become an -- option)
- #
- # set_property(TARGET luametatex PROPERTY INTERPROCEDURAL_OPTIMIZATION TRUE)
- #
- # mingw64: 2865664, nocona: 2819584, lto: 2835968 (around 1% gain on manual)
- #
- set(CMAKE_INTERPROCEDURAL_OPTIMIZATION TRUE)
- #
- else()
- # No message needed, just accept the fact.
- endif()
-
-endif()
-
-# Mimalloc is still under development, so we only support it on a few platforms. By the time it is
-# stable we can probably remove some of the following tests. A bit of a hack:
-#
-# When the old osx version is dropped and armhf is upgraded we can enable unix except solaris which
-# fails. So, only osx 10.6 and rpi 32 fail. But we will probably drop 32 bit in the future anyway.
-
-# CMAKE_HOST_SYSTEM_PROCESSOR arm64 x86_64
-
-if (CMAKE_HOST_SOLARIS)
- # fails
-elseif (MSVC)
- set(luametatex_use_mimalloc 1)
-elseif (CMAKE_HOST_APPLE AND NOT (${CMAKE_C_COMPILER} MATCHES "arm"))
- # fails on the osx intel
-elseif (${CMAKE_SYSTEM_PROCESSOR} MATCHES "armv7l")
- # fails on the rpi 32 bit
-else()
- set(luametatex_use_mimalloc 1)
-endif()
-
-include_directories(${CMAKE_ROOT}/source)
-include_directories(${CMAKE_CURRENT_SOURCE_DIR}/source)
-
-if ((DEFINED luametatex_use_mimalloc))
- add_definitions(-DLUAMETATEX_USE_MIMALLOC=1)
- # add_definitions(-DMIMALLOC_RESET_DELAY=250)
- # set(luametatex_use_mimalloc 1)
- include(cmake/mimalloc.cmake)
-endif()
-
-include(cmake/tex.cmake)
-include(cmake/lua.cmake)
-include(cmake/mp.cmake)
-
-include(cmake/luarest.cmake)
-include(cmake/luasocket.cmake)
-include(cmake/luaoptional.cmake)
-
-include(cmake/pplib.cmake)
-include(cmake/miniz.cmake)
-include(cmake/softposit.cmake)
-
-include(cmake/luametatex.cmake)
+cmake_minimum_required(VERSION 3.9)
+
+project(luametatex VERSION 2.10 LANGUAGES C)
+
+set(CMAKE_C_STANDARD 11)
+# set(CMAKE_CXX_STANDARD 17)
+
+# https://sourceforge.net/p/predef/wiki/OperatingSystems/
+# https://sourceforge.net/p/predef/wiki/Architectures/
+
+include(GNUInstallDirs)
+
+# Optionals (maybe have a LMT_*_TOO for each of them). We might start out with only a very few
+# optionals at some time, but for now we enable them (there is not not much code involved). The
+# idea behind thes eoptionals is that we have very simple (!) interfaces, delegating as much as
+# possible to Lua. We will *not* add interfaces with many bindings because that will introduce
+# dependencies (and looking at e.g. LuaTeX build updates shows that clearly: a no-go).
+
+set(LMT_KPSE_TOO 1) # In case we want to manage MKII scripts (etc) with mtxrun.
+set(LMT_HB_TOO 1) # Maybe handy for Idris' font development (old converted ffi stuff)
+
+# When set, because we're sparse we also strip the binary. Because we only gain some 1-2% on
+# runtime, enabling it makes not much sense:
+
+# set(LMT_OPTIMIZE 1)
+
+if (MSVC)
+
+ if (CMAKE_C_COMPILER_ID STREQUAL "Clang")
+
+ add_compile_options(
+ -Wall
+ -O2
+
+ -Wcast-align
+ -Wcast-qual
+
+ -Wno-unknown-pragmas
+ -fno-strict-aliasing
+
+ -Wno-pedantic
+ -Wno-deprecated-declarations
+ -Wno-missing-noreturn
+ -Wno-shadow
+ )
+
+ add_definitions(-D_CRT_SECURE_NO_WARNINGS)
+
+ add_definitions(-DLMT_COMPILER_USED="clang")
+
+ else()
+
+ add_compile_options(
+ /Wall
+
+ /wd4127 # constant conditional expression
+ /wd4131 # old style declarator
+ /wd4152 # function pointer cast
+ /wd4201 # nonstandard extension used: nameless struct/union
+ /wd4244 # assignment in conditional expression
+ /wd4456 # local vars with same name as outer variable
+ /wd4457 # local vars with same function parameter
+ /wd4464 # relative include path
+ /wd4668 # missing defines
+ /wd4702 # unreachable code
+ /wd4710 # inlining
+ /wd4711 # inlining
+ /wd4774 # sprint argument 2 warning
+ /wd4777 # format argument 2 warning
+ /wd4820 # local vars with same name as outer variable
+ /wd4996 # strdup etc warnings
+ /wd5045 # spectre
+
+ # /GL # whole program link optimization
+ # /Gw # whole program data optimization (a little smaller bin)
+
+ # /Ob3 # more agressive inline, much larger bin, no gain
+
+ /wd4061 # enumerator * in switch * is not explicitly handles (mp)
+ /wd4701 # potentially unitialized local variable (lua)
+ /wd4255 # no function prototype given
+
+ /wd5105 # macro expansion producing 'defined' has undefined behavior
+
+ /wd4548 # expression before comma has no effect; expected expression with side-effect
+
+ # indeed a bit faster but also a much larger binary:
+
+ # /fp:fast
+
+ # okay for amd processors too but no difference in size so probably no gain:
+
+ # /favor:INTEL64
+ # /fsanitize:address
+ # /std:c17
+
+ )
+
+ # We always optimize ... symbols are not in the binary anyway so there is no advantage
+ # (like when accessing Lua api functions). We could have an additional luametatex-lua.dll
+ # but that also creates a dependency (possible conflict).
+
+ # if (DEFINED LMT_OPTIMIZE)
+ add_compile_options(
+ /GL # whole program link optimization
+ /Gw # whole program data optimization (a little smaller bin)
+ )
+ # endif()
+
+ add_definitions(-DLMT_COMPILER_USED="msvc")
+
+ endif()
+
+ else()
+
+ if (CMAKE_C_COMPILER_ID STREQUAL "Clang")
+
+ # why not -03
+
+ add_compile_options(
+ -O2
+ )
+
+ add_definitions(-DLMT_COMPILER_USED="clang")
+
+ else()
+
+ add_compile_options(
+ -O3
+ # -g0
+ # -mtune=nocona # fails on arm so more testing needed
+ )
+
+ add_definitions(-DLMT_COMPILER_USED="gcc")
+
+ # add_compile_options(-pg)
+ # add_link_options(-pg)
+
+ endif()
+
+ add_compile_options(
+ -Wall
+
+ -Wcast-align
+ -Wcast-qual
+
+ -Wno-unknown-pragmas
+ -Wno-unused-result
+ -fno-strict-aliasing
+ )
+
+ # for c17
+ #
+ # add_definitions(-D__STDC_WANT_LIB_EXT2__=1)
+
+ if (DEFINED LMT_OPTIMIZE)
+ if (NOT (${CMAKE_SYSTEM_NAME} MATCHES "Darwin"))
+ set(CMAKE_EXE_LINKER_FLAGS "-s")
+ endif()
+ endif()
+
+endif()
+
+if (CMAKE_C_COMPILER_ID STREQUAL "Clang")
+
+ add_compile_options(
+ -Wno-unknown-warning-option
+ -Wno-nonportable-include-path
+ -Wno-nonportable-system-include-path
+ -Wno-newline-eof
+ -Wno-extra-semi-stmt
+ -Wno-sign-conversion
+ -Wno-unused-macros
+ -Wno-reserved-id-macro
+ -Wno-comma
+ -Wno-switch-enum
+ -Wno-shadow
+ -Wno-missing-noreturn
+ -Wno-implicit-fallthrough
+ # -Wno-format
+ -Wno-reserved-identifier
+ -Wno-date-time
+ -Wno-format-nonliteral
+ -Wno-float-equal
+ )
+
+endif()
+
+# Not that tested (converted ffi originals):
+
+if ((DEFINED LMT_KPSE_TOO))
+ add_definitions(-DLMT_KPSE_TOO=1)
+endif()
+if ((DEFINED LMT_HB_TOO))
+ add_definitions(-DLMT_HB_TOO=1)
+endif()
+
+# This needs cmake >= 3.9 and produces a 60K smaller mingw binary but it take quite a bit of
+# runtime to get there so it should become an option (apart from testing on all builders).
+
+if (DEFINED LMT_OPTIMIZE)
+
+ include(CheckIPOSupported)
+ check_ipo_supported(RESULT ipo_supported OUTPUT ipo_message)
+
+ if (ipo_supported)
+ #
+ # We only have one program so we do it global (can become an -- option)
+ #
+ # set_property(TARGET luametatex PROPERTY INTERPROCEDURAL_OPTIMIZATION TRUE)
+ #
+ # mingw64: 2865664, nocona: 2819584, lto: 2835968 (around 1% gain on manual)
+ #
+ set(CMAKE_INTERPROCEDURAL_OPTIMIZATION TRUE)
+ #
+ else()
+ # No message needed, just accept the fact.
+ endif()
+
+endif()
+
+# Mimalloc is still under development, so we only support it on a few platforms. By the time it is
+# stable we can probably remove some of the following tests. A bit of a hack:
+#
+# When the old osx version is dropped and armhf is upgraded we can enable unix except solaris which
+# fails. So, only osx 10.6 and rpi 32 fail. But we will probably drop 32 bit in the future anyway.
+
+# CMAKE_HOST_SYSTEM_PROCESSOR arm64 x86_64
+
+if (CMAKE_HOST_SOLARIS)
+ # fails
+elseif (MSVC)
+ set(luametatex_use_mimalloc 1)
+elseif (CMAKE_HOST_APPLE AND NOT (${CMAKE_C_COMPILER} MATCHES "arm"))
+ # fails on the osx intel
+elseif (${CMAKE_SYSTEM_PROCESSOR} MATCHES "armv7l")
+ # fails on the rpi 32 bit
+else()
+ set(luametatex_use_mimalloc 1)
+endif()
+
+include_directories(${CMAKE_ROOT}/source)
+include_directories(${CMAKE_CURRENT_SOURCE_DIR}/source)
+
+if ((DEFINED luametatex_use_mimalloc))
+ add_definitions(-DLUAMETATEX_USE_MIMALLOC=1)
+ # add_definitions(-DMIMALLOC_RESET_DELAY=250)
+ # set(luametatex_use_mimalloc 1)
+ include(cmake/mimalloc.cmake)
+endif()
+
+include(cmake/tex.cmake)
+include(cmake/lua.cmake)
+include(cmake/mp.cmake)
+
+include(cmake/luarest.cmake)
+include(cmake/luasocket.cmake)
+include(cmake/luaoptional.cmake)
+
+include(cmake/pplib.cmake)
+include(cmake/miniz.cmake)
+include(cmake/softposit.cmake)
+
+include(cmake/luametatex.cmake)
diff --git a/source/luametatex/build.txt b/source/luametatex/build.txt
index aa81254e6..40d8553eb 100644
--- a/source/luametatex/build.txt
+++ b/source/luametatex/build.txt
@@ -1,43 +1,43 @@
-Hi,
-
-The build script produce efficient static binaries with only a couple of system libraries as
-dependency. ConTeXt will not depend on anything else than provided here. Lua is the extension
-language to be used and that has worked well for quite a while now.
-
-The build script that is provided will compile under ./build so you might want to make a copy
-of the source tree to a suitable place that you can wipe after the job is done. The script
-accepts only a few command line arguments.
-
- build.sh :
-
- --native build/native meant for unix (linux, freebsd, openbsd, osx, arm)
- --mingw-32 build/mingw-32 meant for 32 bit windows (crosscompiled)
- --mingw-64 build/mingw-64 meant for 64 bit windows (crosscompiled)
-
-I develop LuaMetaTeX on Windows and use WLS (with OpenSuse) for cross compilation as well as
-native Linux binaries. Editing is done in Visual Studio with the exception of the MetaPost
-CWeb files for which I use SciTE.
-
-Because we use CMake, you can compile using the MSVC compiler as well as CLang. Currently the
-MingW crosscompiled binaries are slightly faster, next come the native ones, but till now
-CLang lags behind. The native compiler produces the smallest binaries and compiles fastest.
-
- build.cmd :
-
- --x64 build/msvc-cmd-x64 meant for 64 bit windows using intel/amd chips
- --x32 build/msvc-cmd-x86 meant for 32 bit windows using intel/amd chips
- --arm64 build/msvc-cmd-arm64 meant for 64 bit windows using arm chips
-
-Alternatively you can run a build job from Visual Studio. Of course it only works well if you
-have the right compilers installed which is easy to do from the user interface. All settings
-happen in CMakeLists.txt so you have to load that one.
-
-Support for LuaMetaTeX and ConTeXt is provided at the (dev-)context mailing lists and at the
-ConTeXt Wiki. Binaries are available at:
-
- https://build.contextgarden.net/#/waterfall?tags=c.luametatex
- https://dl.contextgarden.net/build/luametatex
-
-The first link shows the status, the second link is where the binaries can be downloaded.
-
-Hans Hagen
+Hi,
+
+The build script produce efficient static binaries with only a couple of system libraries as
+dependency. ConTeXt will not depend on anything else than provided here. Lua is the extension
+language to be used and that has worked well for quite a while now.
+
+The build script that is provided will compile under ./build so you might want to make a copy
+of the source tree to a suitable place that you can wipe after the job is done. The script
+accepts only a few command line arguments.
+
+ build.sh :
+
+ --native build/native meant for unix (linux, freebsd, openbsd, osx, arm)
+ --mingw-32 build/mingw-32 meant for 32 bit windows (crosscompiled)
+ --mingw-64 build/mingw-64 meant for 64 bit windows (crosscompiled)
+
+I develop LuaMetaTeX on Windows and use WLS (with OpenSuse) for cross compilation as well as
+native Linux binaries. Editing is done in Visual Studio with the exception of the MetaPost
+CWeb files for which I use SciTE.
+
+Because we use CMake, you can compile using the MSVC compiler as well as CLang. Currently the
+MingW crosscompiled binaries are slightly faster, next come the native ones, but till now
+CLang lags behind. The native compiler produces the smallest binaries and compiles fastest.
+
+ build.cmd :
+
+ --x64 build/msvc-cmd-x64 meant for 64 bit windows using intel/amd chips
+ --x32 build/msvc-cmd-x86 meant for 32 bit windows using intel/amd chips
+ --arm64 build/msvc-cmd-arm64 meant for 64 bit windows using arm chips
+
+Alternatively you can run a build job from Visual Studio. Of course it only works well if you
+have the right compilers installed which is easy to do from the user interface. All settings
+happen in CMakeLists.txt so you have to load that one.
+
+Support for LuaMetaTeX and ConTeXt is provided at the (dev-)context mailing lists and at the
+ConTeXt Wiki. Binaries are available at:
+
+ https://build.contextgarden.net/#/waterfall?tags=c.luametatex
+ https://dl.contextgarden.net/build/luametatex
+
+The first link shows the status, the second link is where the binaries can be downloaded.
+
+Hans Hagen
diff --git a/source/luametatex/source/libraries/avl/readme.txt b/source/luametatex/source/libraries/avl/readme.txt
index 90ff0949b..9b4708248 100644
--- a/source/luametatex/source/libraries/avl/readme.txt
+++ b/source/luametatex/source/libraries/avl/readme.txt
@@ -1,23 +1,23 @@
-Remark
-
-Usage of the avl library (irr) showed up in pdfTeX when Hartmut added some functionality. It therefore
-also ended up in being used in LuaTeX. The two files avl.c and avl.h come from pyavl and are in the
-public domain:
-
- license: this package, pyavl, is donated to the public domain
- author : Richard McGraw
- email : dasnar@fastmail.fm
-
-In the pdfTeX/LuaTeX the files were just there but I could track them down to
-
- https://github.com/pankajp/pyavl
-
-where the dates indicate that nothing has changed in the meantime. In the copies used here I added the
-information mentioned above. The files had some (experimental) code as well as optional testing on NULL
-values. As I don't expect updates (the code has been okay for quite a while) I made the tests mandate
-and removed the experimental code.
-
-We can strip this library and save some 10K on the binary because we don't need that much of it. That
-might happen at some point.
-
+Remark
+
+Usage of the avl library (irr) showed up in pdfTeX when Hartmut added some functionality. It therefore
+also ended up in being used in LuaTeX. The two files avl.c and avl.h come from pyavl and are in the
+public domain:
+
+ license: this package, pyavl, is donated to the public domain
+ author : Richard McGraw
+ email : dasnar@fastmail.fm
+
+In the pdfTeX/LuaTeX the files were just there but I could track them down to
+
+ https://github.com/pankajp/pyavl
+
+where the dates indicate that nothing has changed in the meantime. In the copies used here I added the
+information mentioned above. The files had some (experimental) code as well as optional testing on NULL
+values. As I don't expect updates (the code has been okay for quite a while) I made the tests mandate
+and removed the experimental code.
+
+We can strip this library and save some 10K on the binary because we don't need that much of it. That
+might happen at some point.
+
Hans Hagen \ No newline at end of file
diff --git a/source/luametatex/source/libraries/libcerf/readme-luametatex.txt b/source/luametatex/source/libraries/libcerf/readme-luametatex.txt
index bb552f263..4ba0240e1 100644
--- a/source/luametatex/source/libraries/libcerf/readme-luametatex.txt
+++ b/source/luametatex/source/libraries/libcerf/readme-luametatex.txt
@@ -1,26 +1,26 @@
-LS,
-
-In the following files you can find the comment below. We don't want to bother or burden the
-original authors with our problems. The cerf code is mostly used in MetaFun macros (by Alan
-Braslau). The c.h and cpp.h files are gone.
-
- defs.h
- cerf.h
-
----------------------------------------------------------------------------------------------
-This file is patched by Mojca Miklavec and Hans Hagen for usage in LuaMetaTeX where we use
-only C and also want to compile with the Microsoft compiler. So, when updating this library
-one has to check for changes. Not that we expect many as this is a rather stable library.
-
-In the other files there are a few macros used that deal with the multiplication and addition
-of complex and real nmbers. Of course the original code is kept as-is.
----------------------------------------------------------------------------------------------
-
-So, when updating the library you need to diff for the changes that are needed in order to
-compile the files with the Microsoft compiler.
-
-At some point I might patch the files so that we can intercept error messages in a way that
-permits recovery and also plugs them into our normal message handlers. Maybe I should also
-merge the code into just one file because it doesn't change.
-
-Hans
+LS,
+
+In the following files you can find the comment below. We don't want to bother or burden the
+original authors with our problems. The cerf code is mostly used in MetaFun macros (by Alan
+Braslau). The c.h and cpp.h files are gone.
+
+ defs.h
+ cerf.h
+
+---------------------------------------------------------------------------------------------
+This file is patched by Mojca Miklavec and Hans Hagen for usage in LuaMetaTeX where we use
+only C and also want to compile with the Microsoft compiler. So, when updating this library
+one has to check for changes. Not that we expect many as this is a rather stable library.
+
+In the other files there are a few macros used that deal with the multiplication and addition
+of complex and real nmbers. Of course the original code is kept as-is.
+---------------------------------------------------------------------------------------------
+
+So, when updating the library you need to diff for the changes that are needed in order to
+compile the files with the Microsoft compiler.
+
+At some point I might patch the files so that we can intercept error messages in a way that
+permits recovery and also plugs them into our normal message handlers. Maybe I should also
+merge the code into just one file because it doesn't change.
+
+Hans
diff --git a/source/luametatex/source/libraries/miniz/readme.txt b/source/luametatex/source/libraries/miniz/readme.txt
index 8a5e6979e..4527133d7 100644
--- a/source/luametatex/source/libraries/miniz/readme.txt
+++ b/source/luametatex/source/libraries/miniz/readme.txt
@@ -1,8 +1,8 @@
-Remark
-
-Conform the recommendation we use the official merged files (release) not the github files. Also, we
-only use part of that single file because we do all file handling ourselves because we operate within
-the file name regime of LuaMetaTeX that is aware of operating system specifics like wide filenames on
-MSWindows). We don't drop in updates without careful checking them first for potential clashes.\\
-
+Remark
+
+Conform the recommendation we use the official merged files (release) not the github files. Also, we
+only use part of that single file because we do all file handling ourselves because we operate within
+the file name regime of LuaMetaTeX that is aware of operating system specifics like wide filenames on
+MSWindows). We don't drop in updates without careful checking them first for potential clashes.\\
+
release url: https://github.com/richgel999/miniz/releases \ No newline at end of file
diff --git a/source/luametatex/source/libraries/pplib/ppapi.h b/source/luametatex/source/libraries/pplib/ppapi.h
index e9ced5718..56137f8f2 100644
--- a/source/luametatex/source/libraries/pplib/ppapi.h
+++ b/source/luametatex/source/libraries/pplib/ppapi.h
@@ -8,7 +8,7 @@
#include "ppconf.h"
-#define pplib_version "v2.1"
+#define pplib_version "v2.2"
#define pplib_author "p.jackowski@gust.org.pl"
/* types */
diff --git a/source/luametatex/source/libraries/pplib/ppcrypt.c b/source/luametatex/source/libraries/pplib/ppcrypt.c
index ce63e7cab..832e8c327 100644
--- a/source/luametatex/source/libraries/pplib/ppcrypt.c
+++ b/source/luametatex/source/libraries/pplib/ppcrypt.c
@@ -137,7 +137,7 @@ static int ppcrypt_password_encoding (uint8_t *password, size_t *passwordlength)
{
uint8_t *p, newpassword[PPCRYPT_MAX_PASSWORD], *n;
const uint8_t *e;
- uint32_t unicode;
+ uint32_t unicode = 0;
for (n = &newpassword[0], p = &password[0], e = p + *passwordlength; p < e; ++n)
{
@@ -398,7 +398,10 @@ static ppcrypt_status ppcrypt_authenticate_permissions (ppcrypt *crypt, ppstring
aes_decode_data(perms->data, perms->size, permsdata, crypt->filekey, crypt->filekeylength, nulliv, AES_NULL_PADDING);
if (permsdata[9] != 'a' || permsdata[10] != 'd' || permsdata[11] != 'b')
- return PPCRYPT_FAIL;
+ { /* if we get here, the password hash is correct, we don't need to fail because of unreadable perms (found such docs) */
+ crypt->flags |= PPCRYPT_UNREADABLE_PERMISSIONS;
+ return PPCRYPT_DONE;
+ }
/* do not check/update permissions flags here; they might be different inside crypt string */
if (0)
diff --git a/source/luametatex/source/libraries/pplib/ppcrypt.h b/source/luametatex/source/libraries/pplib/ppcrypt.h
index 9fa52d878..a7131adbb 100644
--- a/source/luametatex/source/libraries/pplib/ppcrypt.h
+++ b/source/luametatex/source/libraries/pplib/ppcrypt.h
@@ -46,6 +46,7 @@ typedef struct {
#define PPCRYPT_STRING_RC4 (1<<4)
#define PPCRYPT_STREAM_AES (1<<5)
#define PPCRYPT_STRING_AES (1<<6)
+#define PPCRYPT_UNREADABLE_PERMISSIONS (1<<7)
#define PPCRYPT_STREAM (PPCRYPT_STREAM_AES|PPCRYPT_STREAM_RC4)
#define PPCRYPT_STRING (PPCRYPT_STRING_AES|PPCRYPT_STRING_RC4)
diff --git a/source/luametatex/source/libraries/pplib/readme.txt b/source/luametatex/source/libraries/pplib/readme.txt
index ee5d141dc..550367a28 100644
--- a/source/luametatex/source/libraries/pplib/readme.txt
+++ b/source/luametatex/source/libraries/pplib/readme.txt
@@ -1,3 +1,3 @@
-This is (to be) added to util/utilflate.c:
-
-# include "../../utilities/auxzlib.h"
+This is (to be) added to util/utilflate.c:
+
+# include "../../utilities/auxzlib.h"
diff --git a/source/luametatex/source/libraries/readme.txt b/source/luametatex/source/libraries/readme.txt
index f249eae99..a4ccbfc0b 100644
--- a/source/luametatex/source/libraries/readme.txt
+++ b/source/luametatex/source/libraries/readme.txt
@@ -1,37 +1,37 @@
-Nota bene,
-
-The currently embedded libcerf library might become an optional one as soon as we decide to provide
-it as such. It doesn't put a dent in filesize but as it's used rarely (and mostly as complement to
-the complex math support) that makes sense. The library was added because some users wanted it as
-companion the other math libraries and because TeX is often about math it sort of feels okay. But
-it looks like there will never be support for the MSVC compiler. Mojca and I (Hans) adapted the
-sources included here to compile out of the box, but that didn't make it back into the original.
-
-The pplib library has a few patches with respect to memory allocation and zip compression so that
-we can hook in the minizip and mimalloc alternatives.
-
-The avl and hnj libraries are adapted to Lua(Meta)TeX and might get some more adaptations depending
-on our needs. The decnumber library that is also used in mplib is unchanged.
-
-In mimalloc we need to patch init.c: #if defined(_M_X64) || defined(_M_ARM64) to get rid of a link
-error as well as in options.c some snprint issue with the mingw64 cross compiler:
-
-/* HH */ snprintf(tprefix, sizeof(tprefix), "%sthread 0x%x: ", prefix, (unsigned) _mi_thread_id()); /* HH: %z is unknown */
-
-In decNumber.c this got added:
-
-# include "../../utilities/auxmemory.h"
-# define malloc lmt_memory_malloc
-# define free lmt_memory_free
-
-In softposit/source/include/softposit_types.h we have to comment the initializations in the unions
-bcause the compiler complains about it (we're not using c++). So:
-
-uint32_t ui; // =0; // patched by HH because the compilers don't like this
-uint64_t ui[2]; // ={0,0}; // idem
-uint64_t ui[8]; // ={0,0,0,0, 0,0,0,0}; // idme
-uint64_t ui[8]; // ={0,0,0,0, 0,0,0,0}; // idem
-uint64_t ui[8]; // ={0,0,0,0, 0,0,0,0}; // idem
-
-
+Nota bene,
+
+The currently embedded libcerf library might become an optional one as soon as we decide to provide
+it as such. It doesn't put a dent in filesize but as it's used rarely (and mostly as complement to
+the complex math support) that makes sense. The library was added because some users wanted it as
+companion to the other math libraries and because TeX is often about math it sort of feels okay. But
+it looks like there will never be support for the MSVC compiler. Mojca and I (Hans) adapted the
+sources included here to compile out of the box, but that didn't make it back into the original.
+
+The pplib library has a few patches with respect to memory allocation and zip compression so that
+we can hook in the minizip and mimalloc alternatives.
+
+The avl and hnj libraries are adapted to Lua(Meta)TeX and might get some more adaptations depending
+on our needs. The decnumber library that is also used in mplib is unchanged.
+
+In mimalloc we need to patch init.c: #if defined(_M_X64) || defined(_M_ARM64) to get rid of a link
+error as well as in options.c some snprintf issue with the mingw64 cross compiler:
+
+/* HH */ snprintf(tprefix, sizeof(tprefix), "%sthread 0x%x: ", prefix, (unsigned) _mi_thread_id()); /* HH: %z is unknown */
+
+In decNumber.c this got added:
+
+# include "../../utilities/auxmemory.h"
+# define malloc lmt_memory_malloc
+# define free lmt_memory_free
+
+In softposit/source/include/softposit_types.h we have to comment the initializations in the unions
+because the compiler complains about it (we're not using c++). So:
+
+uint32_t ui; // =0; // patched by HH because the compilers don't like this
+uint64_t ui[2]; // ={0,0}; // idem
+uint64_t ui[8]; // ={0,0,0,0, 0,0,0,0}; // idem
+uint64_t ui[8]; // ={0,0,0,0, 0,0,0,0}; // idem
+uint64_t ui[8]; // ={0,0,0,0, 0,0,0,0}; // idem
+
+
Hans \ No newline at end of file
diff --git a/source/luametatex/source/license.txt b/source/luametatex/source/license.txt
index f98c98819..31f70ac1c 100644
--- a/source/luametatex/source/license.txt
+++ b/source/luametatex/source/license.txt
@@ -1,181 +1,181 @@
-------------------------------------------------------------------------------------------
-PREAMBLE
-------------------------------------------------------------------------------------------
-
-The LuaMetaTeX program is a light weight variant of LuaTeX. This program finds its origin
-in parts of TeX (the original program, eTeX (some extensions), pdfTeX (more extensions)
-Aleph (based on Omega, directions) and of course LuaTeX (lots of things).
-
-So, basically we follow up on LuaTeX which itself is a follow up on TeX, eTeX, pdfTeX and
-Aleph. The actual starting point (in 2005) was a special Lua enhanced version of pdfTeX
-by Hartmut Henkel that we experimented with and triggered a follow up. However, the code
-base is no longer Pascal (which then gets converted to C) but regular C code instead. That
-conversion was done by Taco Hoekwater as part of the Oriental TeX project.
-
-There are many articles (presentations and documents) that discuss the development history.
-These articles and documents describing the projects shed more light on what functionality
-showed up when and why. As these projects closely relate to ConTeXt development you can
-find those documents in the ConTeXt distribution.
-
-After this conversion quite some implementation details changed over the decade that
-followed: memory management was adapted, string handling became dynamic, managing the
-table of equivalents was tuned to the mix, callbacks were added. The opening up resulted in
-some adaption of the internals too. Font handling changed, math support for opentype math
-has been introduced. Hyphenation handling, ligature building and kerning are clearly
-separated and language support has been rewritten from scratch. In addition to Lua, the
-TeX engine is also complemented by the MetaPost library. Luigi Scarso added support for
-LuaJIT and ffi and over time makes sure that the code works out okay in the regular
-TeXLive build too.
-
-In 2018-2019 the conversion from LuaTeX to LuaMetaTeX was done by Hans Hagen as part of
-an attempt to simplify the build and get rid of code that might have been useful when we
-started but no longer makes sense. Because the LuaTeX interfaces had to stabelize, this
-follow up also provides us a new testbed. The LuaMetaTeX source code is distributed as
-part of the ConTeXt distribution which is also used for testing and development. Most
-tests are done by those involved in ConTeXt development, so issues should be reported to
-the mailing lists related to this macro package.
-
-In the process the code base has been adapted substantially, although the decade of
-LuaTeX development already prepared for that. This also leads occasionally instable
-setups. We're grateful to ConTeXt community for their patience in testing these continuous
-developments.
-
-The license below is from LuaTeX and also applies to LuaMetaTeX. Although other team
-members contribute(d) to the code, we stick to this description. The --credits option
-provides more information.
-
-------------------------------------------------------------------------------------------
-EXCUSE
-------------------------------------------------------------------------------------------
-
-Although some code comes from pdfTeX and Aleph, the majority comes from good old TeX and
-eTeX or is completely new. Original TeX is a well documented program written in WEB and
-those building upon it have added comments. In LuaMetaTeX we use plain C files but the
-comments are still mostly present. When you read them you really need to keep in mind that
-some refer to good old TeX! The nice comments come from Don Knuth, the bad and fuzzy ones
-originate at us. We appologize to Don for this.
-
-------------------------------------------------------------------------------------------
-CREDITS
-------------------------------------------------------------------------------------------
-
-LuaMetaTeX builds upon the code from LuaTeX which comes from:
-
- tex : Donald Knuth
- etex : Peter Breitenlohner, Phil Taylor and friends
-
-The expansion and protrusion code is derived from:
-
- pdftex : Han The Thanh and friends
-
-Some of the bidirectional text flow model is taken from:
-
- omega : John Plaice and Yannis Haralambous
- aleph : Giuseppe Bilotta
-
-Graphic support is provided by:
-
- metapost : John Hobby, Taco Hoekwater, Luigi Scarso, Hans Hagen and friends
-
-All this is opened up with:
-
- lua : Roberto Ierusalimschy, Waldemar Celes and Luiz Henrique de Figueiredo
- lpeg : Roberto Ierusalimschy
-
-A few libraries are embedded, of which we mention:
-
- avl : Richard McGraw (adapted)
- decnumber : Mike Cowlishaw (IBM)
- libcerf : Joachim Wuttke (adapted to msvc)
- md5 : Peter Deutsch (with partial code from pplib libraries)
- pplib : Paweł Jackowski (with partial code from libraries)
- sha2 : Aaron D. Gifford (with partial code from pplib libraries)
- socket : Diego Nehab (partial and adapted)
- # zlib : Jean-loup Gailly and Mark Adler
- miniz : Rich Geldreich etc.
- mimalloc : Daan Leijen (Microsoft Research)
-
-The code base contains more names and references. Some libraries are partially adapted. We
-use an adapted version of the lfs from the Kepler Project. Also, MetaPost used decNumber
-for decimal number mode.
-
-Depending on demand a few optional libraries can be used, for instance curl, imagemagick,
-lz4, lzo, mujs, mysql, sqlite and zint but there are no dependencies and only very limited
-interfaces are provided (ConTeXt provides \LUA\ layers on top).
-
-------------------------------------------------------------------------------------------
-TEAM
-------------------------------------------------------------------------------------------
-
-LuaTeX : Hans Hagen, Hartmut Henkel, Taco Hoekwater, Luigi Scarso
-LuaMetaTeX : Hans Hagen, Wolfgang Schuster, Mojca Miklavec, Alan Braslau
-
-------------------------------------------------------------------------------------------
-MAIN LICENSE (consider it part of each file that refers to this file)
-------------------------------------------------------------------------------------------
-
-Copyright Taco Hoekwater & Hans Hagen & Wolfgang Schuster
-
-This file is part of LuaMetaTeX.
-
-LuaMetaTeX is free software; you can redistribute it and/or modify it under the terms of
-the GNU General Public License as published by the Free Software Foundation; either
-version 2 of the License, or (at your option) any later version.
-
-LuaMetaTeX is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-See the GNU Lesser General Public License for more details.
-
-You could have received a copy of the GNU General Public License along with LuaMetaTeX; if
-not, see <http://www.gnu.org/licenses/>.
-
-------------------------------------------------------------------------------------------
-REMARK
-------------------------------------------------------------------------------------------
-
-The Lua(Meta)TeX team can always decide to relicense to a variant licence in the future,
-but please don't start one of these religious licence discussions with us. We like what
-we're doing, we permits others to use it, and that is the bottomline.
-
-------------------------------------------------------------------------------------------
-OTHER LICENSES
-------------------------------------------------------------------------------------------
-
-The files taken from elsewhere have their own license information at the top of the files
-or in a file their path. It's a mixed bag but basically all permit usage and extensions.
-
-------------------------------------------------------------------------------------------
-BUILD | FARM | REPOSITORIES
-------------------------------------------------------------------------------------------
-
-Mojca Miklavec, Hans Hagen, Alan Braslau
-
-------------------------------------------------------------------------------------------
-CONTEXT MKII | MKIV | LMTX
-------------------------------------------------------------------------------------------
-
-Hans Hagen, Wolfgang Schuster, etc
-
-------------------------------------------------------------------------------------------
-WEBSITE
-------------------------------------------------------------------------------------------
-
-http://www.luatex.org
-http://contextgarden.net
-http://www.pragma-ade.com / http://www.pragma-nl.com
-
-------------------------------------------------------------------------------------------
-SUPPORT
-------------------------------------------------------------------------------------------
-
-http://www.ntg.nl/mailman/listinfo/ntg-context
-http://www.ntg.nl/mailman/listinfo/dev-context
-
-------------------------------------------------------------------------------------------
-MORE LINKS
-------------------------------------------------------------------------------------------
-
-http://www.ntg.nl
-http://www.tug.org
-
-------------------------------------------------------------------------------------------
+------------------------------------------------------------------------------------------
+PREAMBLE
+------------------------------------------------------------------------------------------
+
+The LuaMetaTeX program is a light weight variant of LuaTeX. This program finds its origin
+in parts of TeX (the original program, eTeX (some extensions), pdfTeX (more extensions)
+Aleph (based on Omega, directions) and of course LuaTeX (lots of things).
+
+So, basically we follow up on LuaTeX which itself is a follow up on TeX, eTeX, pdfTeX and
+Aleph. The actual starting point (in 2005) was a special Lua enhanced version of pdfTeX
+by Hartmut Henkel that we experimented with and triggered a follow up. However, the code
+base is no longer Pascal (which then gets converted to C) but regular C code instead. That
+conversion was done by Taco Hoekwater as part of the Oriental TeX project.
+
+There are many articles (presentations and documents) that discuss the development history.
+These articles and documents describing the projects shed more light on what functionality
+showed up when and why. As these projects closely relate to ConTeXt development you can
+find those documents in the ConTeXt distribution.
+
+After this conversion quite some implementation details changed over the decade that
+followed: memory management was adapted, string handling became dynamic, managing the
+table of equivalents was tuned to the mix, callbacks were added. The opening up resulted in
+some adaption of the internals too. Font handling changed, math support for opentype math
+has been introduced. Hyphenation handling, ligature building and kerning are clearly
+separated and language support has been rewritten from scratch. In addition to Lua, the
+TeX engine is also complemented by the MetaPost library. Luigi Scarso added support for
+LuaJIT and ffi and over time makes sure that the code works out okay in the regular
+TeXLive build too.
+
+In 2018-2019 the conversion from LuaTeX to LuaMetaTeX was done by Hans Hagen as part of
+an attempt to simplify the build and get rid of code that might have been useful when we
+started but no longer makes sense. Because the LuaTeX interfaces had to stabilize, this
+follow up also provides us a new testbed. The LuaMetaTeX source code is distributed as
+part of the ConTeXt distribution which is also used for testing and development. Most
+tests are done by those involved in ConTeXt development, so issues should be reported to
+the mailing lists related to this macro package.
+
+In the process the code base has been adapted substantially, although the decade of
+LuaTeX development already prepared for that. This also occasionally leads to unstable
+setups. We're grateful to ConTeXt community for their patience in testing these continuous
+developments.
+
+The license below is from LuaTeX and also applies to LuaMetaTeX. Although other team
+members contribute(d) to the code, we stick to this description. The --credits option
+provides more information.
+
+------------------------------------------------------------------------------------------
+EXCUSE
+------------------------------------------------------------------------------------------
+
+Although some code comes from pdfTeX and Aleph, the majority comes from good old TeX and
+eTeX or is completely new. Original TeX is a well documented program written in WEB and
+those building upon it have added comments. In LuaMetaTeX we use plain C files but the
+comments are still mostly present. When you read them you really need to keep in mind that
+some refer to good old TeX! The nice comments come from Don Knuth, the bad and fuzzy ones
+originate at us. We apologize to Don for this.
+
+------------------------------------------------------------------------------------------
+CREDITS
+------------------------------------------------------------------------------------------
+
+LuaMetaTeX builds upon the code from LuaTeX which comes from:
+
+ tex : Donald Knuth
+ etex : Peter Breitenlohner, Phil Taylor and friends
+
+The expansion and protrusion code is derived from:
+
+ pdftex : Han The Thanh and friends
+
+Some of the bidirectional text flow model is taken from:
+
+ omega : John Plaice and Yannis Haralambous
+ aleph : Giuseppe Bilotta
+
+Graphic support is provided by:
+
+ metapost : John Hobby, Taco Hoekwater, Luigi Scarso, Hans Hagen and friends
+
+All this is opened up with:
+
+ lua : Roberto Ierusalimschy, Waldemar Celes and Luiz Henrique de Figueiredo
+ lpeg : Roberto Ierusalimschy
+
+A few libraries are embedded, of which we mention:
+
+ avl : Richard McGraw (adapted)
+ decnumber : Mike Cowlishaw (IBM)
+ libcerf : Joachim Wuttke (adapted to msvc)
+ md5 : Peter Deutsch (with partial code from pplib libraries)
+ pplib : Paweł Jackowski (with partial code from libraries)
+ sha2 : Aaron D. Gifford (with partial code from pplib libraries)
+ socket : Diego Nehab (partial and adapted)
+ # zlib : Jean-loup Gailly and Mark Adler
+ miniz : Rich Geldreich etc.
+ mimalloc : Daan Leijen (Microsoft Research)
+
+The code base contains more names and references. Some libraries are partially adapted. We
+use an adapted version of the lfs from the Kepler Project. Also, MetaPost used decNumber
+for decimal number mode.
+
+Depending on demand a few optional libraries can be used, for instance curl, imagemagick,
+lz4, lzo, mujs, mysql, sqlite and zint but there are no dependencies and only very limited
+interfaces are provided (ConTeXt provides \LUA\ layers on top).
+
+------------------------------------------------------------------------------------------
+TEAM
+------------------------------------------------------------------------------------------
+
+LuaTeX : Hans Hagen, Hartmut Henkel, Taco Hoekwater, Luigi Scarso
+LuaMetaTeX : Hans Hagen, Wolfgang Schuster, Mojca Miklavec, Alan Braslau
+
+------------------------------------------------------------------------------------------
+MAIN LICENSE (consider it part of each file that refers to this file)
+------------------------------------------------------------------------------------------
+
+Copyright Taco Hoekwater & Hans Hagen & Wolfgang Schuster
+
+This file is part of LuaMetaTeX.
+
+LuaMetaTeX is free software; you can redistribute it and/or modify it under the terms of
+the GNU General Public License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+LuaMetaTeX is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+See the GNU Lesser General Public License for more details.
+
+You could have received a copy of the GNU General Public License along with LuaMetaTeX; if
+not, see <http://www.gnu.org/licenses/>.
+
+------------------------------------------------------------------------------------------
+REMARK
+------------------------------------------------------------------------------------------
+
+The Lua(Meta)TeX team can always decide to relicense to a variant licence in the future,
+but please don't start one of these religious licence discussions with us. We like what
+we're doing, we permit others to use it, and that is the bottomline.
+
+------------------------------------------------------------------------------------------
+OTHER LICENSES
+------------------------------------------------------------------------------------------
+
+The files taken from elsewhere have their own license information at the top of the files
+or in a file along their path. It's a mixed bag but basically all permit usage and extensions.
+
+------------------------------------------------------------------------------------------
+BUILD | FARM | REPOSITORIES
+------------------------------------------------------------------------------------------
+
+Mojca Miklavec, Hans Hagen, Alan Braslau
+
+------------------------------------------------------------------------------------------
+CONTEXT MKII | MKIV | LMTX
+------------------------------------------------------------------------------------------
+
+Hans Hagen, Wolfgang Schuster, etc
+
+------------------------------------------------------------------------------------------
+WEBSITE
+------------------------------------------------------------------------------------------
+
+http://www.luatex.org
+http://contextgarden.net
+http://www.pragma-ade.com / http://www.pragma-nl.com
+
+------------------------------------------------------------------------------------------
+SUPPORT
+------------------------------------------------------------------------------------------
+
+http://www.ntg.nl/mailman/listinfo/ntg-context
+http://www.ntg.nl/mailman/listinfo/dev-context
+
+------------------------------------------------------------------------------------------
+MORE LINKS
+------------------------------------------------------------------------------------------
+
+http://www.ntg.nl
+http://www.tug.org
+
+------------------------------------------------------------------------------------------
diff --git a/source/luametatex/source/lua/lmtcallbacklib.c b/source/luametatex/source/lua/lmtcallbacklib.c
index a971b5e57..ec9788c13 100644
--- a/source/luametatex/source/lua/lmtcallbacklib.c
+++ b/source/luametatex/source/lua/lmtcallbacklib.c
@@ -54,6 +54,7 @@ static const char *callbacklib_names[total_callbacks] = {
"show_warning_message",
"hpack_quality",
"vpack_quality",
+ "show_break",
"insert_par",
"append_line_filter",
"build_page_insert",
@@ -307,7 +308,7 @@ static int callbacklib_aux_run(lua_State *L, int id, int special, const char *va
return tex_formatted_error("callback", "string expected, not: %s\n", lua_typename(L, t));
}
break;
- case callback_result_key:
+ case callback_result_s_key:
switch (t) {
case LUA_TNIL:
*va_arg(vl, int *) = 0;
@@ -342,6 +343,16 @@ static int callbacklib_aux_run(lua_State *L, int id, int special, const char *va
return tex_formatted_error("callback", "string, false or nil expected, not: %s\n", lua_typename(L, t));
}
break;
+ case callback_result_i_key:
+ switch (t) {
+ case LUA_TNUMBER:
+ *va_arg(vl, int *) = lmt_tointeger(L, nres);
+ break;
+ default:
+ /* *va_arg(vl, int *) = 0; */ /*tex We keep the value! */
+ break;
+ }
+ break;
default:
return tex_formatted_error("callback", "invalid value type returned\n");
}
diff --git a/source/luametatex/source/lua/lmtcallbacklib.h b/source/luametatex/source/lua/lmtcallbacklib.h
index 7801e1a70..50b1cb4d1 100644
--- a/source/luametatex/source/lua/lmtcallbacklib.h
+++ b/source/luametatex/source/lua/lmtcallbacklib.h
@@ -40,6 +40,7 @@ typedef enum callback_callback_types {
show_warning_message_callback,
hpack_quality_callback,
vpack_quality_callback,
+ show_break_callback,
insert_par_callback,
append_line_filter_callback,
build_page_insert_callback,
@@ -80,7 +81,8 @@ typedef enum callback_keys {
callback_lstring_key = 'L', /*tex a \LUA\ string (struct) */
callback_node_key = 'N', /*tex a \TEX\ node (halfword) */
callback_string_key = 'S', /*tex a \CCODE\ string */
- callback_result_key = 'R', /*tex a string (return value) but nil is also okay */
+ callback_result_s_key = 'R', /*tex a string (return value) but nil is also okay */
+ callback_result_i_key = 'r', /*tex a number (return value) but nil is also okay */
} callback_keys;
inline static int lmt_callback_defined (int a) { return lmt_callback_state.values[a]; }
diff --git a/source/luametatex/source/lua/lmtinterface.c b/source/luametatex/source/lua/lmtinterface.c
index 2c94feb6f..5132606de 100644
--- a/source/luametatex/source/lua/lmtinterface.c
+++ b/source/luametatex/source/lua/lmtinterface.c
@@ -174,6 +174,20 @@ void lmt_initialize_interface(void)
set_alignment_context_value(package_pass_alignment_context, package);
set_alignment_context_value(wrapup_pass_alignment_context, wrapup);
+ lmt_interface.break_context_values = lmt_aux_allocate_value_info(wrapup_show_breaks_context);
+
+ # define set_break_context_value(n,k) lmt_interface.break_context_values[n] = (value_info) { .lua = lua_key_index(k), .name = lua_key(k), .id = n }
+
+ set_break_context_value(initialize_show_breaks_context, initialize);
+ set_break_context_value(start_show_breaks_context, start);
+ set_break_context_value(list_show_breaks_context, list);
+ set_break_context_value(stop_show_breaks_context, stop);
+ set_break_context_value(collect_show_breaks_context, collect);
+ set_break_context_value(line_show_breaks_context, line);
+ set_break_context_value(delete_show_breaks_context, delete);
+ set_break_context_value(report_show_breaks_context, report);
+ set_break_context_value(wrapup_show_breaks_context, wrapup);
+
lmt_interface.par_begin_values = lmt_aux_allocate_value_info(vrule_char_par_begin);
# define set_par_begin_value(n,k) lmt_interface.par_begin_values[n] = (value_info) { .lua = lua_key_index(k), .name = lua_key(k), .id = n }
diff --git a/source/luametatex/source/lua/lmtinterface.h b/source/luametatex/source/lua/lmtinterface.h
index 47460acac..ea6ead4ab 100644
--- a/source/luametatex/source/lua/lmtinterface.h
+++ b/source/luametatex/source/lua/lmtinterface.h
@@ -341,6 +341,7 @@ typedef struct lmt_interface_info {
value_info *page_context_values;
value_info *append_line_context_values;
value_info *alignment_context_values;
+ value_info *break_context_values;
value_info *par_begin_values;
value_info *par_mode_values;
value_info *math_style_name_values;
@@ -540,6 +541,7 @@ make_lua_key(L, clubpenalty);\
make_lua_key(L, cmd);\
make_lua_key(L, cmdname);\
make_lua_key(L, collapse);\
+make_lua_key(L, collect);\
make_lua_key(L, combine_toks);\
make_lua_key(L, command);\
make_lua_key(L, comment);\
@@ -576,6 +578,7 @@ make_lua_key(L, define_font);\
make_lua_key(L, define_lua_call);\
make_lua_key(L, degree);\
make_lua_key(L, degreevariant);\
+make_lua_key(L, delete);\
make_lua_key(L, delimited);\
make_lua_key(L, DelimitedSubFormulaMinHeight);\
make_lua_key(L, delimiter);\
@@ -762,6 +765,7 @@ make_lua_key(L, index);\
make_lua_key(L, info);\
make_lua_key(L, Info);\
make_lua_key(L, inherited);\
+make_lua_key(L, initialize);\
make_lua_key(L, inner);\
make_lua_key(L, innerlocation);\
make_lua_key(L, innerxoffset);\
@@ -1113,6 +1117,7 @@ make_lua_key(L, relax);\
make_lua_key(L, remove_item);\
make_lua_key(L, repeat);\
make_lua_key(L, replace);\
+make_lua_key(L, report);\
make_lua_key(L, reserved);\
make_lua_key(L, reset);\
make_lua_key(L, rhmin);\
@@ -1170,6 +1175,7 @@ make_lua_key(L, shrink);\
make_lua_key(L, shrinkorder);\
make_lua_key(L, simple);\
make_lua_key(L, size);\
+make_lua_key(L, single);\
make_lua_key(L, skewchar);\
make_lua_key(L, SkewedDelimiterTolerance);\
make_lua_key(L, skeweddelimitertolerance);\
@@ -1222,6 +1228,7 @@ make_lua_key(L, stackvgap);\
make_lua_key(L, start);\
make_lua_key(L, state);\
make_lua_key(L, step);\
+make_lua_key(L, stop);\
make_lua_key(L, stretch);\
make_lua_key(L, stretchorder);\
make_lua_key(L, StretchStackBottomShiftDown);\
diff --git a/source/luametatex/source/lua/lmtnodelib.c b/source/luametatex/source/lua/lmtnodelib.c
index 7896eff65..ba2d0f0ba 100644
--- a/source/luametatex/source/lua/lmtnodelib.c
+++ b/source/luametatex/source/lua/lmtnodelib.c
@@ -548,7 +548,7 @@ static int nodelib_direct_setfont(lua_State *L)
case rule_node:
tex_set_rule_font(n, lmt_tohalfword(L, 2));
if (lua_type(L, 3) == LUA_TNUMBER) {
- rule_character(n) = lmt_tohalfword(L, 3);
+ rule_strut_character(n) = lmt_tohalfword(L, 3);
}
break;
case glue_node:
@@ -617,7 +617,7 @@ static int nodelib_direct_getchar(lua_State *L)
lua_pushinteger(L, glyph_character(n));
break;
case rule_node:
- lua_pushinteger(L, rule_character(n));
+ lua_pushinteger(L, rule_strut_character(n));
break;
case math_char_node:
case math_text_char_node:
@@ -646,7 +646,7 @@ static int nodelib_direct_setchar(lua_State *L)
glyph_character(n) = lmt_tohalfword(L, 2);
break;
case rule_node:
- rule_character(n) = lmt_tohalfword(L, 2);
+ rule_strut_character(n) = lmt_tohalfword(L, 2);
break;
case math_char_node:
case math_text_char_node:
@@ -674,7 +674,7 @@ static int nodelib_direct_getcharspec(lua_State *L)
lua_pushinteger(L, glyph_font(n));
return 2;
case rule_node:
- lua_pushinteger(L, rule_character(n));
+ lua_pushinteger(L, rule_strut_character(n));
lua_pushinteger(L, tex_get_rule_font(n, text_style));
break;
case simple_noad:
@@ -1489,8 +1489,8 @@ static int nodelib_direct_getoffsets(lua_State *L)
case rule_node:
lua_pushinteger(L, rule_x_offset(n));
lua_pushinteger(L, rule_y_offset(n));
- lua_pushinteger(L, rule_left(n));
- lua_pushinteger(L, rule_right(n));
+ lua_pushinteger(L, tex_get_rule_left(n));
+ lua_pushinteger(L, tex_get_rule_right(n));
return 4;
}
}
@@ -1537,10 +1537,10 @@ static int nodelib_direct_setoffsets(lua_State *L)
rule_y_offset(n) = (halfword) lmt_roundnumber(L, 3);
}
if (lua_type(L, 4) == LUA_TNUMBER) {
- rule_left(n) = (halfword) lmt_roundnumber(L, 4);
+ tex_set_rule_left(n, (halfword) lmt_roundnumber(L, 4));
}
if (lua_type(L, 5) == LUA_TNUMBER) {
- rule_right(n) = (halfword) lmt_roundnumber(L, 5);
+ tex_set_rule_right(n, (halfword) lmt_roundnumber(L, 5));
}
break;
}
@@ -1610,10 +1610,10 @@ static int nodelib_direct_addmargins(lua_State *L)
break;
case rule_node:
if (lua_type(L, 2) == LUA_TNUMBER) {
- rule_left(n) += (halfword) lmt_roundnumber(L, 2);
+ tex_set_rule_left(n, tex_get_rule_left(n) + (halfword) lmt_roundnumber(L, 2));
}
if (lua_type(L, 3) == LUA_TNUMBER) {
- rule_right(n) += (halfword) lmt_roundnumber(L, 3);
+ tex_set_rule_right(n, tex_get_rule_right(n) + (halfword) lmt_roundnumber(L, 3));
}
break;
}
@@ -2907,9 +2907,9 @@ static int nodelib_direct_getruledimensions(lua_State *L)
halfword n = nodelib_valid_direct_from_index(L, 1);
if (n && node_type(n) == rule_node) {
if (node_subtype(n) == virtual_rule_subtype) {
- lua_pushinteger(L, rule_data(n));
- lua_pushinteger(L, rule_left(n));
- lua_pushinteger(L, rule_right(n));
+ lua_pushinteger(L, rule_virtual_width(n));
+ lua_pushinteger(L, rule_virtual_height(n));
+ lua_pushinteger(L, rule_virtual_depth(n));
lua_pushboolean(L, 1);
} else {
lua_pushinteger(L, rule_width(n));
@@ -2923,6 +2923,32 @@ static int nodelib_direct_getruledimensions(lua_State *L)
}
}
+static int nodelib_direct_setruledimensions(lua_State *L)
+{
+ halfword n = nodelib_valid_direct_from_index(L, 1);
+ if (n && node_type(n) == rule_node) {
+ scaled wd = (scaled) lmt_roundnumber(L, 2);
+ scaled ht = (scaled) lmt_roundnumber(L, 3);
+ scaled dp = (scaled) lmt_roundnumber(L, 4);
+ if (node_subtype(n) == virtual_rule_subtype) {
+ rule_virtual_width(n) = wd;
+ rule_virtual_height(n) = ht;
+ rule_virtual_depth(n) = dp;
+ rule_width(n) = 0;
+ rule_height(n) = 0;
+ rule_depth(n) = 0;
+ } else {
+ rule_width(n) = wd;
+ rule_height(n) = ht;
+ rule_depth(n) = dp;
+ }
+ if (lua_type(L, 5) == LUA_TNUMBER) {
+ rule_data(n) = (halfword) lmt_roundnumber(L, 5);
+ }
+ }
+ return 0;
+}
+
/* node.direct.getlist */
static int nodelib_direct_getlist(lua_State *L)
@@ -6577,21 +6603,21 @@ static int nodelib_common_getfield(lua_State *L, int direct, halfword n)
} else if (lua_key_eq(s, total)) {
lua_pushinteger(L, rule_total(n));
} else if (lua_key_eq(s, xoffset)) {
- lua_pushinteger(L,rule_x_offset(n));
+ lua_pushinteger(L, rule_x_offset(n));
} else if (lua_key_eq(s, yoffset)) {
- lua_pushinteger(L,rule_y_offset(n));
+ lua_pushinteger(L, rule_y_offset(n));
} else if (lua_key_eq(s, left)) {
- lua_pushinteger(L,rule_left(n));
+ lua_pushinteger(L, tex_get_rule_left(n));
} else if (lua_key_eq(s, right)) {
- lua_pushinteger(L,rule_right(n));
+ lua_pushinteger(L, tex_get_rule_right(n));
} else if (lua_key_eq(s, data)) {
- lua_pushinteger(L,rule_data(n));
+ lua_pushinteger(L, rule_data(n));
} else if (lua_key_eq(s, font)) {
lua_pushinteger(L, tex_get_rule_font(n, text_style));
} else if (lua_key_eq(s, fam)) {
lua_pushinteger(L, tex_get_rule_font(n, text_style));
} else if (lua_key_eq(s, char)) {
- lua_pushinteger(L, rule_character(n));
+ lua_pushinteger(L, rule_strut_character(n));
} else {
lua_pushnil(L);
}
@@ -7262,9 +7288,9 @@ static int nodelib_common_setfield(lua_State *L, int direct, halfword n)
} else if (lua_key_eq(s, yoffset)) {
rule_y_offset(n) = (halfword) lmt_roundnumber(L, 3);
} else if (lua_key_eq(s, left)) {
- rule_left(n) = (halfword) lmt_roundnumber(L, 3);
+ tex_set_rule_left(n, (halfword) lmt_roundnumber(L, 3));
} else if (lua_key_eq(s, right)) {
- rule_right(n) = (halfword) lmt_roundnumber(L, 3);
+ tex_set_rule_right(n, (halfword) lmt_roundnumber(L, 3));
} else if (lua_key_eq(s, data)) {
rule_data(n) = lmt_tohalfword(L, 3);
} else if (lua_key_eq(s, font)) {
@@ -7272,7 +7298,7 @@ static int nodelib_common_setfield(lua_State *L, int direct, halfword n)
} else if (lua_key_eq(s, fam)) {
tex_set_rule_family(n, lmt_tohalfword(L, 3));
} else if (lua_key_eq(s, char)) {
- rule_character(n) = lmt_tohalfword(L, 3);
+ rule_strut_character(n) = lmt_tohalfword(L, 3);
} else {
goto CANTSET;
}
@@ -9809,6 +9835,7 @@ static const struct luaL_Reg nodelib_direct_function_list[] = {
{ "getkerndimension", nodelib_direct_getkerndimension },
{ "getlistdimensions", nodelib_direct_getlistdimensions },
{ "getruledimensions", nodelib_direct_getruledimensions },
+ { "setruledimensions", nodelib_direct_setruledimensions },
{ "patchattributes", nodelib_direct_patchattributes },
{ "remove", nodelib_direct_remove },
{ "removefromlist", nodelib_direct_remove_from_list },
diff --git a/source/luametatex/source/lua/lmtstatuslib.c b/source/luametatex/source/lua/lmtstatuslib.c
index ee785e806..2613115da 100644
--- a/source/luametatex/source/lua/lmtstatuslib.c
+++ b/source/luametatex/source/lua/lmtstatuslib.c
@@ -356,6 +356,8 @@ static int statslib_getconstants(lua_State *L)
lua_set_integer_by_key(L, "assumed_math_control", assumed_math_control);
lua_set_integer_by_key(L, "undefined_math_parameter", undefined_math_parameter);
+
+ lua_set_integer_by_key(L, "max_calculated_badness", max_calculated_badness);
return 1;
}
diff --git a/source/luametatex/source/lua/lmttexlib.c b/source/luametatex/source/lua/lmttexlib.c
index 39afd94fb..1bfd2c38c 100644
--- a/source/luametatex/source/lua/lmttexlib.c
+++ b/source/luametatex/source/lua/lmttexlib.c
@@ -2782,6 +2782,7 @@ static int texlib_aux_convert(lua_State *L, int cur_code)
case cs_active_code: /* arg token */
/* case cs_lastname_code: */ /* arg token */
case detokenized_code: /* arg token */
+ case detokened_code: /* arg cs or {} */
case meaning_code: /* arg token */
case to_mathstyle_code:
break;
@@ -5016,7 +5017,7 @@ static int texlib_getglyphoptionvalues(lua_State *L)
static int texlib_getnoadoptionvalues(lua_State *L)
{
- lua_createtable(L, 2, 36);
+ lua_createtable(L, 2, 37);
lua_push_key_at_index(L, axis, noad_option_axis);
lua_push_key_at_index(L, noaxis, noad_option_no_axis);
lua_push_key_at_index(L, exact, noad_option_exact);
@@ -5058,6 +5059,7 @@ static int texlib_getnoadoptionvalues(lua_State *L)
lua_push_key_at_index(L, center, noad_option_center);
lua_push_key_at_index(L, scale, noad_option_scale);
lua_push_key_at_index(L, keepbase, noad_option_keep_base);
+ lua_push_key_at_index(L, single, noad_option_single);
// lua_set_string_by_index(L, noad_option_keep_base, "keepbase");
return 1;
@@ -5158,7 +5160,7 @@ static int texlib_getspecialmathclassvalues(lua_State *L)
static int texlib_getmathclassoptionvalues(lua_State *L)
{
- lua_createtable(L, 2, 19);
+ lua_createtable(L, 2, 20);
lua_set_string_by_index(L, no_pre_slack_class_option, "nopreslack");
lua_set_string_by_index(L, no_post_slack_class_option, "nopostslack");
lua_set_string_by_index(L, left_top_kern_class_option, "lefttopkern");
@@ -5185,6 +5187,7 @@ static int texlib_getmathclassoptionvalues(lua_State *L)
lua_set_string_by_index(L, auto_inject_class_option, "autoinject");
lua_set_string_by_index(L, remove_italic_correction_class_option, "removeitaliccorrection");
lua_set_string_by_index(L, operator_italic_correction_class_option, "operatoritaliccorrection");
+ lua_set_string_by_index(L, short_inline_class_option, "shortinline");
return 1;
}
@@ -5209,6 +5212,7 @@ static int texlib_getnormalizeparvalues(lua_State *L)
lua_createtable(L, 2, 0);
lua_set_string_by_index(L, normalize_par_mode, "normalizepar");
lua_set_string_by_index(L, flatten_v_leaders_mode, "flattenvleaders");
+ lua_set_string_by_index(L, limit_prev_graf_mode, "limitprevgraf");
return 1;
}
@@ -5397,6 +5401,11 @@ static int texlib_getalignmentcontextvalues(lua_State *L)
return lmt_push_info_values(L, lmt_interface.alignment_context_values);
}
+static int texlib_getbreakcontextvalues(lua_State *L)
+{
+ return lmt_push_info_values(L, lmt_interface.break_context_values);
+}
+
static int texlib_getparbeginvalues(lua_State *L)
{
return lmt_push_info_values(L, lmt_interface.par_begin_values);
@@ -5481,6 +5490,18 @@ static int texlib_gettextcontrolvalues(lua_State *L)
return 1;
}
+static int texlib_getfitnessvalues(lua_State *L)
+{
+ lua_createtable(L, 5, 1);
+ lua_set_string_by_index(L, very_loose_fit, "veryloose");
+ lua_set_string_by_index(L, loose_fit, "loose");
+ lua_set_string_by_index(L, semi_loose_fit, "semiloose");
+ lua_set_string_by_index(L, decent_fit, "decent");
+ lua_set_string_by_index(L, semi_tight_fit, "semitight");
+ lua_set_string_by_index(L, tight_fit, "tight");
+ return 1;
+}
+
static int texlib_getfillvalues(lua_State *L)
{
return lmt_push_info_values(L, lmt_interface.node_fill_values);
@@ -5772,12 +5793,14 @@ static const struct luaL_Reg texlib_function_list[] = {
/* {"getmathflattenvalues", texlib_getmathflattenvalues }, */
{ "getmathcontrolvalues", texlib_getmathcontrolvalues },
{ "gettextcontrolvalues", texlib_gettextcontrolvalues },
+ { "getfitnessvalues", texlib_getfitnessvalues },
{ "getpacktypevalues", texlib_getpacktypevalues },
{ "getgroupvalues", texlib_getgroupvalues },
{ "getparcontextvalues", texlib_getparcontextvalues },
{ "getpagecontextvalues", texlib_getpagecontextvalues },
{ "getappendlinecontextvalues", texlib_getappendlinecontextvalues },
{ "getalignmentcontextvalues", texlib_getalignmentcontextvalues },
+ { "getbreakcontextvalues", texlib_getbreakcontextvalues },
{ "getparbeginvalues", texlib_getparbeginvalues },
{ "getparmodevalues", texlib_getparmodevalues },
{ "getautomigrationvalues", texlib_getautomigrationvalues },
diff --git a/source/luametatex/source/lua/lmttokenlib.c b/source/luametatex/source/lua/lmttokenlib.c
index c8f7f9039..fd3b95b0f 100644
--- a/source/luametatex/source/lua/lmttokenlib.c
+++ b/source/luametatex/source/lua/lmttokenlib.c
@@ -213,7 +213,6 @@ void lmt_tokenlib_initialize(void)
lmt_interface.command_names[convert_cmd] = (command_item) { .id = convert_cmd, .lua = lua_key_index(convert), .name = lua_key(convert), .kind = regular_command_item, .min = 0, .max = last_convert_code, .base = 0, .fixedvalue = 0 };
lmt_interface.command_names[the_cmd] = (command_item) { .id = the_cmd, .lua = lua_key_index(the), .name = lua_key(the), .kind = regular_command_item, .min = 0, .max = last_the_code, .base = 0, .fixedvalue = 0 };
lmt_interface.command_names[get_mark_cmd] = (command_item) { .id = get_mark_cmd, .lua = lua_key_index(get_mark), .name = lua_key(get_mark), .kind = regular_command_item, .min = 0, .max = last_get_mark_code, .base = 0, .fixedvalue = 0 };
- /* lmt_interface.command_names[string_cmd] = (command_item) { .id = string_cmd, .lua = lua_key_index(string), .name = lua_key(string), .kind = regular_command_item, .min = ignore_entry, .max = max_integer, .base = 0, .fixedvalue = 0 }; */
lmt_interface.command_names[call_cmd] = (command_item) { .id = call_cmd, .lua = lua_key_index(call), .name = lua_key(call), .kind = token_command_item, .min = ignore_entry, .max = ignore_entry, .base = ignore_entry, .fixedvalue = 0 };
lmt_interface.command_names[protected_call_cmd] = (command_item) { .id = protected_call_cmd, .lua = lua_key_index(protected_call), .name = lua_key(protected_call), .kind = token_command_item, .min = ignore_entry, .max = ignore_entry, .base = ignore_entry, .fixedvalue = 0 };
lmt_interface.command_names[semi_protected_call_cmd] = (command_item) { .id = semi_protected_call_cmd, .lua = lua_key_index(semi_protected_call), .name = lua_key(protected_call), .kind = token_command_item, .min = ignore_entry, .max = ignore_entry, .base = ignore_entry, .fixedvalue = 0 };
@@ -1431,6 +1430,54 @@ static int tokenlib_scan_tokenlist(lua_State *L)
return 1;
}
+static int tokenlib_scan_detokened(lua_State *L)
+{
+ saved_tex_scanner texstate = tokenlib_aux_save_tex_scanner();
+ int expand = lua_toboolean(L, 1);
+ halfword defref = lmt_input_state.def_ref;
+ halfword result = null;
+ int macro = 0;
+ tokenlib_aux_goto_first_candidate(); /*tex We don't expand the next token! */
+ switch (cur_cmd) {
+ case left_brace_cmd:
+ result = expand ? tex_scan_toks_expand(1, NULL, 0) : tex_scan_toks_normal(1, NULL);
+ break;
+ case call_cmd:
+ case protected_call_cmd:
+ case semi_protected_call_cmd:
+ case tolerant_call_cmd:
+ case tolerant_protected_call_cmd:
+ case tolerant_semi_protected_call_cmd:
+ result = cur_chr;
+ macro = 1;
+ break;
+ default:
+ tex_back_input(cur_tok);
+ break;
+ }
+ tokenlib_aux_unsave_tex_scanner(texstate);
+ lmt_input_state.def_ref = defref;
+ if (result) {
+ if (token_link(result)) {
+ tex_detokenize_list(token_link(result));
+ if (lmt_string_pool_state.string_temp && lmt_string_pool_state.string_temp_top) {
+ lua_pushlstring(L, (char *) lmt_string_pool_state.string_temp, lmt_string_pool_state.string_temp_top);
+ } else {
+ lua_pushliteral(L,"");
+ }
+ tex_reset_cur_string();
+ } else {
+ lua_pushliteral(L,"");
+ }
+ if (! macro) {
+ tex_flush_token_list(result);
+ }
+ } else {
+ lua_pushnil(L);
+ }
+ return 1;
+}
+
/* todo: other call_cmd */
static int tokenlib_scan_string(lua_State *L)
@@ -3630,6 +3677,7 @@ static const struct luaL_Reg tokenlib_function_list[] = {
{ "scancsname", tokenlib_scan_csname },
{ "scantoken", tokenlib_scan_token }, /* expands next token if needed */
{ "scanbox", tokenlib_scan_box },
+ { "scandetokened", tokenlib_scan_detokened },
{ "isnextchar", tokenlib_is_next_char },
/* writers */
{ "putnext", tokenlib_put_next },
diff --git a/source/luametatex/source/luacore/lua54/originals/patches.txt b/source/luametatex/source/luacore/lua54/originals/patches.txt
index 8a3fc4363..ebd644527 100644
--- a/source/luametatex/source/luacore/lua54/originals/patches.txt
+++ b/source/luametatex/source/luacore/lua54/originals/patches.txt
@@ -1,11 +1,11 @@
---------------------------------------------------------------------------------------
-lctype.h : no longer needed as we dan use -DLUA_UCID now
---------------------------------------------------------------------------------------
-
-/* lislalpha(c) testprop(c, MASK(ALPHABIT)) */
-/* lislalnum(c) testprop(c, (MASK(ALPHABIT) | MASK(DIGITBIT))) */
-
-# define lislalpha(c) (testprop(c, MASK(ALPHABIT)) || (c) > 0x7f)
-# define lislalnum(c) (testprop(c, (MASK(ALPHABIT) | MASK(DIGITBIT))) || (c) > 0x7f)
-
---------------------------------------------------------------------------------------
+--------------------------------------------------------------------------------------
+lctype.h : no longer needed as we can use -DLUA_UCID now
+--------------------------------------------------------------------------------------
+
+/* lislalpha(c) testprop(c, MASK(ALPHABIT)) */
+/* lislalnum(c) testprop(c, (MASK(ALPHABIT) | MASK(DIGITBIT))) */
+
+# define lislalpha(c) (testprop(c, MASK(ALPHABIT)) || (c) > 0x7f)
+# define lislalnum(c) (testprop(c, (MASK(ALPHABIT) | MASK(DIGITBIT))) || (c) > 0x7f)
+
+--------------------------------------------------------------------------------------
diff --git a/source/luametatex/source/luacore/lua54/readme.txt b/source/luametatex/source/luacore/lua54/readme.txt
index 5637f04ae..5b3ad4918 100644
--- a/source/luametatex/source/luacore/lua54/readme.txt
+++ b/source/luametatex/source/luacore/lua54/readme.txt
@@ -1,8 +1,8 @@
-This is Lua 5.4 as taken from: https://github.com/lua/lua.git (intermediate releases). For
-installation instructions, license details, and further information about Lua, see the
-documentation of LUA.
-
-There is a pitfall in using release candidates: when the bytecode organization changes
-we can get crashes. At some point the luac version became an integer so we could encode
-a subnumber but that was reverted to a byte. This means that we again can get crashes
-(unless we mess a bit with that byte). It makes usage a bit fragile but so be it.
+This is Lua 5.4 as taken from: https://github.com/lua/lua.git (intermediate releases). For
+installation instructions, license details, and further information about Lua, see the
+documentation of LUA.
+
+There is a pitfall in using release candidates: when the bytecode organization changes
+we can get crashes. At some point the luac version became an integer so we could encode
+a subnumber but that was reverted to a byte. This means that we again can get crashes
+(unless we mess a bit with that byte). It makes usage a bit fragile but so be it.
diff --git a/source/luametatex/source/luacore/luapeg/readme.txt b/source/luametatex/source/luacore/luapeg/readme.txt
index 17b6b404d..d8ba6b709 100644
--- a/source/luametatex/source/luacore/luapeg/readme.txt
+++ b/source/luametatex/source/luacore/luapeg/readme.txt
@@ -1,9 +1,9 @@
-Commented line in lptypes.h:
-
- # include <assert.h>
-
-Added line in lptypes.h:
-
- # define assert(condition) ((void)0)
-
+Commented line in lptypes.h:
+
+ # include <assert.h>
+
+Added line in lptypes.h:
+
+ # define assert(condition) ((void)0)
+
Maybe some day lua_assert will be used in lpeg. \ No newline at end of file
diff --git a/source/luametatex/source/luacore/readme.txt b/source/luametatex/source/luacore/readme.txt
index 23eb77311..5156e1047 100644
--- a/source/luametatex/source/luacore/readme.txt
+++ b/source/luametatex/source/luacore/readme.txt
@@ -1,34 +1,34 @@
-About luasocket and luasec:
-
-Till mid 2021 we had the luasec code in the source tree but it was not used yet. It requires
-openssl which is pretty large and we need a bunch of header files. In order to compile luasec
-we need openssl headers and unfortunately there are a few included files that one need to
-make. This create a dependency unless we make a few simple ones; after all we only need it for
-a few platforms. I couldn't locate a neutral header set so it never came to compilation (I
-started making a set myself but could not motivate myself to finish it). We could use it as
-optional library (which then demands a bit different interface). But, no matter what we
-decide, we definitely don't want to compile openssl and include it in the binary. One problem
-with these additional libraries is that they add more code than luametatex itself has so that
-makes no sense.
-
-For the record, an alternative is to use the more lightweight armmbed or polarssl library but
-then I need either to make wrappers or adapt the luasec code.
-
-Anyway, when we consider secure http we also enter the endless updating of protocols because
-the internet is more and more wrapped in security due to lack of control over bad behaviour
-and abuse around it. Plugging holes is not among the objectives of this project also because
-it conflicts with long term stability of what basically is a typesetting engine.
-
-On a positive note, when we use sockets to serve http we can hide behind a proxy, for instance
-nginx is easy to set up and Lua(Meta)TeX happily sits behind it. When downloading something we
-need to cache anyway so then we can as well use libcurl for which we have interfaces built in
-already. If installing openssl is considered a valid option, then libcurl can hardly be seen
-as a hurdle. We probably need that anyway some day in the installer and updater.
-
-The basic socket library is quite stable. In ConTeXt the Lua files already have been 'redone'
-to fit it the lot. In the code base some C files have been removed (serial and unix specific
-stuff) and at some point I might decide to strip away the files and functionality that we
-don't need. Occasionally there are updates to the library but in general it's rather long
-term stable.
-
+About luasocket and luasec:
+
+Till mid 2021 we had the luasec code in the source tree but it was not used yet. It requires
+openssl which is pretty large and we need a bunch of header files. In order to compile luasec
+we need openssl headers and unfortunately there are a few included files that one needs to
+make. This creates a dependency unless we make a few simple ones; after all we only need it for
+a few platforms. I couldn't locate a neutral header set so it never came to compilation (I
+started making a set myself but could not motivate myself to finish it). We could use it as
+optional library (which then demands a bit different interface). But, no matter what we
+decide, we definitely don't want to compile openssl and include it in the binary. One problem
+with these additional libraries is that they add more code than luametatex itself has so that
+makes no sense.
+
+For the record, an alternative is to use the more lightweight armmbed or polarssl library but
+then I need either to make wrappers or adapt the luasec code.
+
+Anyway, when we consider secure http we also enter the endless updating of protocols because
+the internet is more and more wrapped in security due to lack of control over bad behaviour
+and abuse around it. Plugging holes is not among the objectives of this project also because
+it conflicts with long term stability of what basically is a typesetting engine.
+
+On a positive note, when we use sockets to serve http we can hide behind a proxy, for instance
+nginx is easy to set up and Lua(Meta)TeX happily sits behind it. When downloading something we
+need to cache anyway so then we can as well use libcurl for which we have interfaces built in
+already. If installing openssl is considered a valid option, then libcurl can hardly be seen
+as a hurdle. We probably need that anyway some day in the installer and updater.
+
+The basic socket library is quite stable. In ConTeXt the Lua files already have been 'redone'
+to fit it the lot. In the code base some C files have been removed (serial and unix specific
+stuff) and at some point I might decide to strip away the files and functionality that we
+don't need. Occasionally there are updates to the library but in general it's rather long
+term stable.
+
So to summarize: luasocket stayed and luasec is no longer considered as a built-in. \ No newline at end of file
diff --git a/source/luametatex/source/luametatex.h b/source/luametatex/source/luametatex.h
index 2e3b53e71..2a99867e8 100644
--- a/source/luametatex/source/luametatex.h
+++ b/source/luametatex/source/luametatex.h
@@ -92,7 +92,7 @@
# define luametatex_version 210
# define luametatex_revision 9
# define luametatex_version_string "2.10.09"
-# define luametatex_development_id 20230508
+# define luametatex_development_id 20230525
# define luametatex_name_camelcase "LuaMetaTeX"
# define luametatex_name_lowercase "luametatex"
diff --git a/source/luametatex/source/luaoptional/cmake/mujs/CMakeLists.txt b/source/luametatex/source/luaoptional/cmake/mujs/CMakeLists.txt
index cfe2ee2bf..83b6b48ac 100644
--- a/source/luametatex/source/luaoptional/cmake/mujs/CMakeLists.txt
+++ b/source/luametatex/source/luaoptional/cmake/mujs/CMakeLists.txt
@@ -1,107 +1,107 @@
-# This file is made by Mojca and Hans and is subjected to changes
-# as we proceed with luametatex and the contextgarden compile farm.
-
-cmake_minimum_required(VERSION 3.7)
-
-# Lucky us: only normal C is used:
-
-project (mujs
- VERSION 1.0.6
- DESCRIPTION "MuJS embeddable Javascript interpreter"
- LANGUAGES C)
-
-# The jsrepr.c is not needed and depends on utf.c as well has some function
-# pointer cast issue (accessing unknown name field).
-
-set (mujs_sources
- jsarray.c
- jsboolean.c
- jsbuiltin.c
- jscompile.c
- jsdate.c
- jsdtoa.c
- jsdump.c
- jserror.c
- jsfunction.c
- jsgc.c
- jsintern.c
- jslex.c
- jsmath.c
- jsnumber.c
- jsobject.c
- json.c
- jsparse.c
- jsproperty.c
- jsregexp.c
-# jsrepr.c
- jsrun.c
- jsstate.c
- jsstring.c
- jsvalue.c
- regexp.c
- utf.c
- utftype.c
-)
-
-set (mujs_headers
- jsbuiltin.h
- jscompile.h
- jsi.h
- jslex.h
- jsparse.h
- jsrun.h
- jsvalue.h
- mujs.h
- regexp.h
- utf.h
-)
-
-# We need this in order for msvc to export the symbols (which is default on
-# gcc). Otherwise we need this dllexport stuff.
-
-set(CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS ON)
-
-# The previous one has to come before the next one!
-
-if (MSVC)
-
- add_library(mujs SHARED ${mujs_headers} ${mujs_sources} )
-
-else()
-
- # Is this hack still needed or does the above work ok.?
-
- add_library(mujs SHARED ${mujs_headers} one.c)
-
-endif()
-
-# As per make file.
-
-set_property(TARGET mujs PROPERTY C_STANDARD 99)
-
-# We want consistent and predictable names:
-
-set_target_properties(mujs PROPERTIES OUTPUT_NAME "libmujs")
-
-# Some options based on what's in upstream's make file.
-
-if (NOT MSVC)
-
- target_compile_options(mujs
- PRIVATE
- -pedantic
- -Wall
- -Wextra
- -Wno-unused-parameter
- )
-
- if (CMAKE_C_COMPILER_ID STREQUAL "Clang")
-
- target_compile_options(mujs
- PRIVATE
- -Wunreachable-code
- )
-
- endif()
-
-endif()
+# This file is made by Mojca and Hans and is subjected to changes
+# as we proceed with luametatex and the contextgarden compile farm.
+
+cmake_minimum_required(VERSION 3.7)
+
+# Lucky us: only normal C is used:
+
+project (mujs
+ VERSION 1.0.6
+ DESCRIPTION "MuJS embeddable Javascript interpreter"
+ LANGUAGES C)
+
+# The jsrepr.c is not needed and depends on utf.c as well as has some function
+# pointer cast issue (accessing unknown name field).
+
+set (mujs_sources
+ jsarray.c
+ jsboolean.c
+ jsbuiltin.c
+ jscompile.c
+ jsdate.c
+ jsdtoa.c
+ jsdump.c
+ jserror.c
+ jsfunction.c
+ jsgc.c
+ jsintern.c
+ jslex.c
+ jsmath.c
+ jsnumber.c
+ jsobject.c
+ json.c
+ jsparse.c
+ jsproperty.c
+ jsregexp.c
+# jsrepr.c
+ jsrun.c
+ jsstate.c
+ jsstring.c
+ jsvalue.c
+ regexp.c
+ utf.c
+ utftype.c
+)
+
+set (mujs_headers
+ jsbuiltin.h
+ jscompile.h
+ jsi.h
+ jslex.h
+ jsparse.h
+ jsrun.h
+ jsvalue.h
+ mujs.h
+ regexp.h
+ utf.h
+)
+
+# We need this in order for msvc to export the symbols (which is default on
+# gcc). Otherwise we need this dllexport stuff.
+
+set(CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS ON)
+
+# The previous one has to come before the next one!
+
+if (MSVC)
+
+ add_library(mujs SHARED ${mujs_headers} ${mujs_sources} )
+
+else()
+
+    # Is this hack still needed or does the above work ok?
+
+ add_library(mujs SHARED ${mujs_headers} one.c)
+
+endif()
+
+# As per make file.
+
+set_property(TARGET mujs PROPERTY C_STANDARD 99)
+
+# We want consistent and predictable names:
+
+set_target_properties(mujs PROPERTIES OUTPUT_NAME "libmujs")
+
+# Some options based on what's in upstream's make file.
+
+if (NOT MSVC)
+
+ target_compile_options(mujs
+ PRIVATE
+ -pedantic
+ -Wall
+ -Wextra
+ -Wno-unused-parameter
+ )
+
+ if (CMAKE_C_COMPILER_ID STREQUAL "Clang")
+
+ target_compile_options(mujs
+ PRIVATE
+ -Wunreachable-code
+ )
+
+ endif()
+
+endif()
diff --git a/source/luametatex/source/luaoptional/readme.txt b/source/luametatex/source/luaoptional/readme.txt
index 31b489da9..579433d61 100644
--- a/source/luametatex/source/luaoptional/readme.txt
+++ b/source/luametatex/source/luaoptional/readme.txt
@@ -1,30 +1,30 @@
-Nota bene,
-
-This is the directory where optional module support ends up. Optional modules have an interface but
-are not (nor will be) part of the binary. We might ship some at the context garden (like zint and
-mujs) but the large one (read: with many dependencies or written in c++) have to come from the
-operating system because if you use a library that is what you want: the external black box thing.
-No sources end up in the distribution either, athough we will archive some.
-
-There will be no user modules here, just those interfaces that we provide and maintain as part of
-standard ConTeXt LMTX. What users add themselves is up to them, including (long time !) support. So,
-this is the canonnical version of optional.
-
-We might at some point add some safeguards so that we can be sure that ConTeXt is run with the
-right binary because we want to prevent side effects (of any kind) resulting from a binary being
-used with the same name and different features ... just because one of the objective is to have
-a long term stable binary / macro package combination. Of course, what users do on their machines
-is up to them.
-
-It might take a while before the interfaces and way we do this is stable. Also, keep in mind that
-regular users never deal with these matters directly and only use the interfaces at the TeX and
-Lua end.
-
-PS. The socket library (and maybe cerf) are also candidates for optional although cerf needs to be
-compiled for windows which is not supported out of the box and sockets are way to large. We only
-do optional libs that add little to the binary, a few KB at most! I'll definitely try to stick to
-this principle!
-
-PS. Todo: move function pointers into state structures.
-
-Hans
+Nota bene,
+
+This is the directory where optional module support ends up. Optional modules have an interface but
+are not (nor will be) part of the binary. We might ship some at the context garden (like zint and
+mujs) but the large one (read: with many dependencies or written in c++) have to come from the
+operating system because if you use a library that is what you want: the external black box thing.
+No sources end up in the distribution either, athough we will archive some.
+
+There will be no user modules here, just those interfaces that we provide and maintain as part of
+standard ConTeXt LMTX. What users add themselves is up to them, including (long time !) support. So,
+this is the canonical version of optional.
+
+We might at some point add some safeguards so that we can be sure that ConTeXt is run with the
+right binary because we want to prevent side effects (of any kind) resulting from a binary being
+used with the same name and different features ... just because one of the objectives is to have
+a long term stable binary / macro package combination. Of course, what users do on their machines
+is up to them.
+
+It might take a while before the interfaces and way we do this is stable. Also, keep in mind that
+regular users never deal with these matters directly and only use the interfaces at the TeX and
+Lua end.
+
+PS. The socket library (and maybe cerf) are also candidates for optional although cerf needs to be
+compiled for windows which is not supported out of the box and sockets are way too large. We only
+do optional libs that add little to the binary, a few KB at most! I'll definitely try to stick to
+this principle!
+
+PS. Todo: move function pointers into state structures.
+
+Hans
diff --git a/source/luametatex/source/mp/mpc/mpmath.c b/source/luametatex/source/mp/mpc/mpmath.c
index 0a848b23e..d58d4f00f 100644
--- a/source/luametatex/source/mp/mpc/mpmath.c
+++ b/source/luametatex/source/mp/mpc/mpmath.c
@@ -5,7 +5,7 @@
# include "mpmath.h"
# include "mpstrings.h"
-# define coef_bound 04525252525
+# define coef_bound 0x25555555
# define fraction_threshold 2685
# define half_fraction_threshold 1342
# define scaled_threshold 8
@@ -18,9 +18,9 @@
# define three (3*unity)
# define half_unit (unity/2)
# define three_quarter_unit (3*(unity/4))
-# define EL_GORDO 0x7fffffff
+# define EL_GORDO 0x7FFFFFFF
# define negative_EL_GORDO (-EL_GORDO)
-# define one_third_EL_GORDO 05252525252
+# define one_third_EL_GORDO 0x2AAAAAAA
# define TWEXP31 2147483648.0
# define TWEXP28 268435456.0
# define TWEXP16 65536.0
@@ -602,7 +602,7 @@ static char *mp_string_scaled (MP mp, int s)
scaled_string[i++] = '.';
do {
if (delta > unity) {
- s = s + 0100000 - (delta / 2);
+ s = s + 0x8000 - (delta / 2);
}
scaled_string[i++] = '0' + (s / unity);
s = 10 * (s % unity);
@@ -646,7 +646,7 @@ static int mp_make_fraction (MP mp, int p, int q)
return EL_GORDO;
} else {
int i = (int) d;
- if (d == (double) i && (((q > 0 ? -q : q) & 077777) * (((i & 037777) << 1) - 1) & 04000) != 0) {
+ if (d == (double) i && (((q > 0 ? -q : q) & 0x7FFF) * (((i & 0x3FFF) << 1) - 1) & 0x800) != 0) {
--i;
}
return i;
@@ -658,7 +658,7 @@ static int mp_make_fraction (MP mp, int p, int q)
return -negative_EL_GORDO;
} else {
int i = (int) d;
- if (d == (double) i && (((q > 0 ? q : -q) & 077777) * (((i & 037777) << 1) + 1) & 04000) != 0) {
+ if (d == (double) i && (((q > 0 ? q : -q) & 0x7FFF) * (((i & 0x3FFF) << 1) + 1) & 0x800) != 0) {
++i;
}
return i;
@@ -678,13 +678,13 @@ int mp_take_fraction (MP mp, int p, int q)
if ((p ^ q) >= 0) {
d += 0.5;
if (d >= TWEXP31) {
- if (d != TWEXP31 || (((p & 077777) * (q & 077777)) & 040000) == 0) {
+            if (d != TWEXP31 || (((p & 0x7FFF) * (q & 0x7FFF)) & 0x4000) == 0) {
mp->arith_error = 1;
}
return EL_GORDO;
} else {
int i = (int) d;
- if (d == (double) i && (((p & 077777) * (q & 077777)) & 040000) != 0) {
+ if (d == (double) i && (((p & 0x7FFF) * (q & 0x7FFF)) & 0x4000) != 0) {
--i;
}
return i;
@@ -692,13 +692,13 @@ int mp_take_fraction (MP mp, int p, int q)
} else {
d -= 0.5;
if (d <= -TWEXP31) {
- if (d != -TWEXP31 || ((-(p & 077777) * (q & 077777)) & 040000) == 0) {
+ if (d != -TWEXP31 || ((-(p & 0x7FFF) * (q & 0x7FFF)) & 0x4000) == 0) {
mp->arith_error = 1;
}
return -negative_EL_GORDO;
} else {
int i = (int) d;
- if (d == (double) i && ((-(p & 077777) * (q & 077777)) & 040000) != 0) {
+ if (d == (double) i && ((-(p & 0x7FFF) * (q & 0x7FFF)) & 0x4000) != 0) {
++i;
}
return i;
@@ -717,13 +717,13 @@ static int mp_take_scaled (MP mp, int p, int q)
if ((p ^ q) >= 0) {
d += 0.5;
if (d >= TWEXP31) {
- if (d != TWEXP31 || (((p & 077777) * (q & 077777)) & 040000) == 0) {
+ if (d != TWEXP31 || (((p & 0x7FFF) * (q & 0x7FFF)) & 0x4000) == 0) {
mp->arith_error = 1;
}
return EL_GORDO;
} else {
int i = (int) d;
- if (d == (double) i && (((p & 077777) * (q & 077777)) & 040000) != 0) {
+ if (d == (double) i && (((p & 0x7FFF) * (q & 0x7FFF)) & 0x4000) != 0) {
--i;
}
return i;
@@ -731,13 +731,13 @@ static int mp_take_scaled (MP mp, int p, int q)
} else {
d -= 0.5;
if (d <= -TWEXP31) {
- if (d != -TWEXP31 || ((-(p & 077777) * (q & 077777)) & 040000) == 0) {
+ if (d != -TWEXP31 || ((-(p & 0x7FFF) * (q & 0x7FFF)) & 0x4000) == 0) {
mp->arith_error = 1;
}
return -negative_EL_GORDO;
} else {
int i = (int) d;
- if (d == (double) i && ((-(p & 077777) * (q & 077777)) & 040000) != 0) {
+ if (d == (double) i && ((-(p & 0x7FFF) * (q & 0x7FFF)) & 0x4000) != 0) {
++i;
}
return i;
@@ -764,7 +764,7 @@ int mp_make_scaled (MP mp, int p, int q)
return EL_GORDO;
} else {
int i = (int) d;
- if (d == (double) i && (((q > 0 ? -q : q) & 077777) * (((i & 037777) << 1) - 1) & 04000) != 0) {
+ if (d == (double) i && (((q > 0 ? -q : q) & 0x7FFF) * (((i & 0x3FFF) << 1) - 1) & 0x800) != 0) {
--i;
}
return i;
@@ -776,7 +776,7 @@ int mp_make_scaled (MP mp, int p, int q)
return -negative_EL_GORDO;
} else {
int i = (int) d;
- if (d == (double) i && (((q > 0 ? q : -q) & 077777) * (((i & 037777) << 1) + 1) & 04000) != 0) {
+ if (d == (double) i && (((q > 0 ? q : -q) & 0x7FFF) * (((i & 0x3FFF) << 1) + 1) & 0x800) != 0) {
++i;
}
return i;
@@ -1239,7 +1239,7 @@ void mp_m_exp (MP mp, mp_number *ret, mp_number *x_orig)
} else {
if (x <= 0) {
z = -8 * x;
- y = 04000000;
+ y = 0x100000;
} else {
if (x <= 127919879) {
z = 1023359037 - 8 * x;
diff --git a/source/luametatex/source/mp/mpw/mpmath.w b/source/luametatex/source/mp/mpw/mpmath.w
index 5e9592fc7..7b43da72b 100644
--- a/source/luametatex/source/mp/mpw/mpmath.w
+++ b/source/luametatex/source/mp/mpw/mpmath.w
@@ -107,7 +107,7 @@ static char *mp_number_tostring (MP mp, mp_number *n);
static char *mp_string_scaled (MP mp, int s);
@
-@d coef_bound 04525252525 /* |fraction| approximation to 7/3 */
+@d coef_bound 0x25555555 /* |fraction| approximation to 7/3 */ /* 04525252525 */
@d fraction_threshold 2685 /* a |fraction| coefficient less than this is zeroed */
@d half_fraction_threshold 1342 /* half of |fraction_threshold| */
@d scaled_threshold 8 /* a |scaled| coefficient less than this is zeroed */
@@ -125,9 +125,9 @@ from the right end of a binary computer word.
@d three (3*unity) /* $2^{17}+2^{16}$, represents 3.00000 */
@d half_unit (unity/2) /* $2^{15}$, represents 0.50000 */
@d three_quarter_unit (3*(unity/4)) /* $3\cdot2^{14}$, represents 0.75000 */
-@d EL_GORDO 0x7fffffff /* $2^{31}-1$, the largest value that \MP\ likes */
+@d EL_GORDO 0x7FFFFFFF /* $2^{31}-1$, the largest value that \MP\ likes */
@d negative_EL_GORDO (-EL_GORDO)
-@d one_third_EL_GORDO 05252525252
+@d one_third_EL_GORDO 0x2AAAAAAA /* 05252525252 */
@ We need these preprocessor values
@@ -685,7 +685,7 @@ static char *mp_string_scaled (MP mp, int s)
do {
/* round the final digit */
if (delta > unity) {
- s = s + 0100000 - (delta / 2);
+ s = s + 0x8000 - (delta / 2);
}
scaled_string[i++] = '0' + (s / unity);
s = 10 * (s % unity);
@@ -769,7 +769,7 @@ static int mp_make_fraction (MP mp, int p, int q)
return EL_GORDO;
} else {
int i = (int) d;
- if (d == (double) i && (((q > 0 ? -q : q) & 077777) * (((i & 037777) << 1) - 1) & 04000) != 0) {
+ if (d == (double) i && (((q > 0 ? -q : q) & 0x7FFF) * (((i & 0x3FFF) << 1) - 1) & 0x800) != 0) {
--i;
}
return i;
@@ -781,7 +781,7 @@ static int mp_make_fraction (MP mp, int p, int q)
return -negative_EL_GORDO;
} else {
int i = (int) d;
- if (d == (double) i && (((q > 0 ? q : -q) & 077777) * (((i & 037777) << 1) + 1) & 04000) != 0) {
+ if (d == (double) i && (((q > 0 ? q : -q) & 0x7FFF) * (((i & 0x3FFF) << 1) + 1) & 0x800) != 0) {
++i;
}
return i;
@@ -811,13 +811,13 @@ int mp_take_fraction (MP mp, int p, int q)
if ((p ^ q) >= 0) {
d += 0.5;
if (d >= TWEXP31) {
- if (d != TWEXP31 || (((p & 077777) * (q & 077777)) & 040000) == 0) {
+ if (d != TWEXP31 || (((p & 0x7FFF) * (q & 0x7FFF)) & 0x4000) == 0) {
mp->arith_error = 1;
}
return EL_GORDO;
} else {
int i = (int) d;
- if (d == (double) i && (((p & 077777) * (q & 077777)) & 040000) != 0) {
+ if (d == (double) i && (((p & 0x7FFF) * (q & 0x7FFF)) & 0x4000) != 0) {
--i;
}
return i;
@@ -825,13 +825,13 @@ int mp_take_fraction (MP mp, int p, int q)
} else {
d -= 0.5;
if (d <= -TWEXP31) {
- if (d != -TWEXP31 || ((-(p & 077777) * (q & 077777)) & 040000) == 0) {
+ if (d != -TWEXP31 || ((-(p & 0x7FFF) * (q & 0x7FFF)) & 0x4000) == 0) {
mp->arith_error = 1;
}
return -negative_EL_GORDO;
} else {
int i = (int) d;
- if (d == (double) i && ((-(p & 077777) * (q & 077777)) & 040000) != 0) {
+ if (d == (double) i && ((-(p & 0x7FFF) * (q & 0x7FFF)) & 0x4000) != 0) {
++i;
}
return i;
@@ -860,13 +860,13 @@ static int mp_take_scaled (MP mp, int p, int q)
if ((p ^ q) >= 0) {
d += 0.5;
if (d >= TWEXP31) {
- if (d != TWEXP31 || (((p & 077777) * (q & 077777)) & 040000) == 0) {
+ if (d != TWEXP31 || (((p & 0x7FFF) * (q & 0x7FFF)) & 0x4000) == 0) {
mp->arith_error = 1;
}
return EL_GORDO;
} else {
int i = (int) d;
- if (d == (double) i && (((p & 077777) * (q & 077777)) & 040000) != 0) {
+ if (d == (double) i && (((p & 0x7FFF) * (q & 0x7FFF)) & 0x4000) != 0) {
--i;
}
return i;
@@ -874,13 +874,13 @@ static int mp_take_scaled (MP mp, int p, int q)
} else {
d -= 0.5;
if (d <= -TWEXP31) {
- if (d != -TWEXP31 || ((-(p & 077777) * (q & 077777)) & 040000) == 0) {
+ if (d != -TWEXP31 || ((-(p & 0x7FFF) * (q & 0x7FFF)) & 0x4000) == 0) {
mp->arith_error = 1;
}
return -negative_EL_GORDO;
} else {
int i = (int) d;
- if (d == (double) i && ((-(p & 077777) * (q & 077777)) & 040000) != 0) {
+ if (d == (double) i && ((-(p & 0x7FFF) * (q & 0x7FFF)) & 0x4000) != 0) {
++i;
}
return i;
@@ -914,7 +914,7 @@ int mp_make_scaled (MP mp, int p, int q)
return EL_GORDO;
} else {
int i = (int) d;
- if (d == (double) i && (((q > 0 ? -q : q) & 077777) * (((i & 037777) << 1) - 1) & 04000) != 0) {
+ if (d == (double) i && (((q > 0 ? -q : q) & 0x7FFF) * (((i & 0x3FFF) << 1) - 1) & 0x800) != 0) {
--i;
}
return i;
@@ -926,7 +926,7 @@ int mp_make_scaled (MP mp, int p, int q)
return -negative_EL_GORDO;
} else {
int i = (int) d;
- if (d == (double) i && (((q > 0 ? q : -q) & 077777) * (((i & 037777) << 1) + 1) & 04000) != 0) {
+ if (d == (double) i && (((q > 0 ? q : -q) & 0x7FFF) * (((i & 0x3FFF) << 1) + 1) & 0x800) != 0) {
++i;
}
return i;
@@ -1575,7 +1575,7 @@ void mp_m_exp (MP mp, mp_number *ret, mp_number *x_orig)
} else {
if (x <= 0) {
z = -8 * x;
- y = 04000000; /* $y=2^{20}$ */
+ y = 0x100000; /* $y=2^{20}$ */
} else {
if (x <= 127919879) {
z = 1023359037 - 8 * x;
diff --git a/source/luametatex/source/mp/mpw/mpmathdouble.w b/source/luametatex/source/mp/mpw/mpmathdouble.w
index f5a91df75..c836fc4bb 100644
--- a/source/luametatex/source/mp/mpw/mpmathdouble.w
+++ b/source/luametatex/source/mp/mpw/mpmathdouble.w
@@ -23,7 +23,7 @@ math_data *mp_initialize_double_math (MP mp);
@* Math initialization.
-First, here are some very important constants.
+First, here are some very important constants. We can have some more (see velocity).
@d PI 3.1415926535897932384626433832795028841971
@d fraction_multiplier 4096.0
diff --git a/source/luametatex/source/mp/readme.txt b/source/luametatex/source/mp/readme.txt
index c48e19a89..bb98005de 100644
--- a/source/luametatex/source/mp/readme.txt
+++ b/source/luametatex/source/mp/readme.txt
@@ -1,14 +1,14 @@
-Remark
-
-When a CWEB file is adapted we need to convert to C. This is normally done with the tangle
-program but as we want to be independent of other tools (which themselves can result in a
-chain of dependencies) we use a Lua script which happens to be run with LuaMetaTeX.
-
-Of course there is a chicken egg issue here but at some point we started with C files so
-now we only need to update.
-
-The script is located in the "tools" path alongside the "source" path and it is run in its
-own directory (which for me means: hit the run key when the document is open). As we always
-ship the C files, there is no need for a user to run the script.
-
+Remark
+
+When a CWEB file is adapted we need to convert to C. This is normally done with the tangle
+program but as we want to be independent of other tools (which themselves can result in a
+chain of dependencies) we use a Lua script which happens to be run with LuaMetaTeX.
+
+Of course there is a chicken egg issue here but at some point we started with C files so
+now we only need to update.
+
+The script is located in the "tools" path alongside the "source" path and it is run in its
+own directory (which for me means: hit the run key when the document is open). As we always
+ship the C files, there is no need for a user to run the script.
+
Hans Hagen \ No newline at end of file
diff --git a/source/luametatex/source/readme.txt b/source/luametatex/source/readme.txt
index 2471f32b3..c045119ed 100644
--- a/source/luametatex/source/readme.txt
+++ b/source/luametatex/source/readme.txt
@@ -1,563 +1,563 @@
---------------------------------------------------------------------------------
-welcome
---------------------------------------------------------------------------------
-
-There is not much information here. I normally keep track of developments in
-articles or chapters in the history documents. These can (sometimes with a delay
-when it's an article) be found in the ConTeXt distribution. The history and
-development of LuaTeX is also documented there, often with examples or usage.
-
-The ideas behind this project are discussed in documents in the regular ConTeXt
-distribution. A short summary is: in order to make sure ConTeXt will work as
-intended, we distribute an engine with it. That way we can control stability,
-performance and features. It also permits experiments without the danger of
-interference with the engines used in other macro packages. Also, we don't want
-dependencies on large subsystems so we have a well defined set of libraries: we
-want to stay lean and mean. Eventually the same applies as to original TeX: we
-fix bugs and don't add all kind of stuff we don't (want or) need. Just that.
-
---------------------------------------------------------------------------------
-codebase
---------------------------------------------------------------------------------
-
-This codebase is a follow up on LuaTeX. It all started with a merge of files
-that came from the Pascal to C converter (CWEB) plus some C libraries. That code
-base evolved over time and there were the usual side effects of the translation
-and merge of (also other engine) code, plus successive extensions as well as Lua
-interfaces. In LuaMetaTeX I tried to smooth things a bit. The idea was to stay
-close to the original (which in the end is TeX itself) so that is why many
-variables, functions etc are named the way they are. Of course I reshuffled, and
-renamed but I also tried to stay close to the original naming. More work needs
-to be done to get it all right but it happens stepwise as I don't want to
-introduce bugs. In the meantime the LuaTeX and LuaMetaTeX code bases differ
-substantially but apart from some new features and stripping away backend and
-font code, the core should work the same.
-
-tex etex pdftex aleph:
-
-Of course the main body of code comes from its ancestors. We started with pdfTeX
-which has its frontend taken from standard TeX, later extended with the eTeX
-additions. Some additional features from pdfTeX were rewritten to become core
-functionality. We also took some from Aleph (Omega) but only some (in the
-meantime adapted) r2l code is left (so we're not compatible).
-
-mp:
-
-The maintainance of MetaPost was delegated to the same people who do luaTeX and
-as a step indevelopment a library was written. This library is used in
-LuaMetaTeX but has been adapted a bit for it. In principle some of the additions
-can be backported, but that is yet undecided.
-
-lua:
-
-This is the third major component of LuaMetaTeX. In LuaTeX a slightly patched
-version has been used but here we use an unchanged version, although the version
-number of the bytecode blob is adapted so that we can use intermediate versions
-of lua 5.4 that expect different bytecode without crashing on existing bytecode;
-this trick has been dropped but I hope at some point Lua will add a define for
-this.
-
-For the record: when we started with LuaTeX I'd gone through a pascal, modula 2,
-perl, ruby with respect to the management helpers for ConTeXt, like dealing with
-indexes, managing metapost subruns, and all kind of goodies that evolved over time.
-I ran into Lua in the SciTE editor and the language and the concept of a small and
-efficient embedded language. The language orginates in academia and is not under
-the influence of (company and commercial driven) marketing. A lot of effort goes
-into stepwise evolution. The authors are clear about the way they work on the
-language:
-
- http://lua-users.org/lists/lua-l/2008-06/msg00407.html
-
-which fits nicely in our philosophy. Just in case one wonders if other scripting
-languages were considered the answer is: no, they were not. The alternatives all
-are large and growing and come with large ecosystems (read: dependencies) and some
-had (seemingly) drastic changes in the design over time. Of course Lua also evolves
-but that is easy to deal with. And in the meantime also the performance of Lua made
-it clear that it was the right choice.
-
-avl:
-
-This library has been in use in the backend code of LuaTeX but is currently only
-used in the MP library. I'm not sure to what extend this (originally meant for
-Python) module has been adapted for pdfTeX/LuaTeX but afaiks it has been stable
-for a long time. It won't be updated but I might adapt it for instance wrt error
-messages so that it fits in.
-
-decnumber:
-
-This is used in one of the additional number models that the mp library supports.
-In LuaMetaTeX there is no support for the binary model. No one uses it and it
-would add quite a bit to the codebase.
-
-hnj:
-
-This GPL licensed module is used in the hyphenation machinery. It has been
-slightly adapted so that error messages and such fit in. I don't expect it to
-change much in the future.
-
-pplib:
-
-This library is made for Lua(Meta)TeX and provides an efficient PDF parser in
-pure C. In LuaTeX it was introduced a replacement for a larger library that
-was overkill for our purpose, depended on C++ and kept changing. This library
-itself uses libraries but that code is shipped with it. We use some of that
-for additional Lua modules (like md5, sha2 and decoding).
-
-lz4 | lzo | zstd:
-
-For years this library was in the code base and even interfaced but not enabled
-by default. When I played with zstd support as optional libary I decided that
-these two should move out of the code base and also be done the optional way. The
-amount of code was not that large, but the binary grew by some 10%. I also played
-with the foreign module and zstd and there is no real difference in peformance. The
-optionals are actually always enabled, but foreign is controlled by the command
-line option that enables loading libraries, and it al;so depends on libffi.
-
-zlib | miniz:
-
-I started with the code taken from LuaTeX, which itself was a copy that saw some
-adaptions over time (irr there were border case issues, like dealing with zero
-length streams and so). It doesn't change so in due time I might strip away some
-unused code. For a while libdeflate was used but because pplib also depends on
-zlib and because libdeflate doesn't do streams that was abandoned (it might come
-back as it is very nice and clean code.). One issue with (de)compression libraries
-is that they use tricks that can be architecture dependent and we stay away from
-that. I try to stay away from those and prefer to let the compiler sort things out.
-
-Early 2021 we switched to miniz. One reason is that the codebase is smaller because
-it doesn't deal with very old or rare platforms and architectures. Its performance
-is comparable, definitely for our purpose, and sometimes even a bit better. I looked
-at other alternatives but as soon as processor specific tricks are used, we end up
-with architecture specific header files and code so it's a no-go for a presumed
-long term stable and easy to compile program like luametatex. There is no gain in it
-anyway.
-
-complex:
-
-There is a complex number interface inspired by the complex number lua module by
-lhf. It also wraps libcerf usage.
-
-lfs:
-
-In LuaTeX we use a patched version of this library. In LuaMetaTeX I rewrote the
-code because too many patches were needed to deal with mswindows properly.
-
-socket:
-
-The core library is used. The library is seldom adapted but I keep an eye on it.
-We used to have a patched version in LuaTeX, but here we stay closer. I might
-eventually do some rewrite (less code) or decide to make it an external library.
-The related Lua code is not in the binary and context uses its own (derived)
-variant so that it uses our helpers as well as fits in the reporting system. I
-need to keep an eye on improvements upstream. We also need to keep an eye on
-copas as we use that code in context.
-
-luasec:
-
-This is not used but here as a reference for a possible future use (maybe as
-library).
-
-curl, ghostscript, graphicmagick, zint, mujs, mysql, postgress, sqlite, ...:
-
-The optional module mechamism supports some external libraries but we don't keep
-their code in the luametatex codebase. We might come up with a separate source
-tree for that, but only for some smaller ones. The large ones, those depending
-on other libraries, or c++, or whatever resources, will just be taken from the
-system.
-
-libcerf:
-
-This library might become external but is now in use as a plug into the complex
-number support that itself is meant for MetaPost use. The code here has been
-adapted to support the Microsoft compiler. I will keep an eye on what happens
-upstream and I reconsider matters later. (There is no real need to bloat the
-LuaMetaTeX binary with something that is rarely used.)
-
-kpse:
-
-There is optional library support for the KPSE library used in WEB2C. Although
-it does provide the methods that make sense, it is not meant for usage in
-ConTeXt, but more as a toolkit to identify issues and conflicts with parallel
-installations like TeXLive.
-
-hb:
-
-I have a module that works the same as the ffi variant from a couple of years
-ago and I might add it when it's needed (for oriental tex font development
-checking purposes, but then I also need to cleanup and add some test styles for
-that purpose). Looking at the many LuaTeX subversion checkins it looks a bit
-like a moving target. It's also written in C++ which we don't (want to) use in
-LuaMetaTeX. But the library comes with other programs so it's likely you can
-find it on you system someplace.
-
-general:
-
-It's really nice to see all these libraries popping up on the web but in the
-perspective of something like TeX one should be careful. Quite often what is hip
-today is old fashioned tomorrow. And quite often the selling point of the new
-thing comes with bashing the old, which can be a sign of something being a
-temporary thing or itself something ot be superseded soon. Now, get me right:
-TeX in itself is great, and so are successors. In that sense LuaMetaTeX is just
-a follow up with no claims made for it being better. It just makes things easier
-for ConTeXt. You can kick in libraries but be aware of the fact that they can
-change, so if you have long running projects, make sure you save them. Or run a
-virtual machine that can last forever. TeX systems can run for ages that way. We
-might eventually add support for generating libs to the compile farm. The older
-a library gets, the bigger the change that its api is stable. Compression
-libraries are great examples, while libraries that deal with images, conversion
-and rendering are more moving (and have way more dependencies too). Actually,
-for the later category, in ConTeXt we prefer to call the command line variants
-instead of using libraries, also because it seldom influences performance.
-
-licenses:
-
-Most files contain some notice about a the license and most are quite liberal.
-I had to add some (notes) that were missing from LuaTeX. There is an occasional
-readme file that tells a bit more.
-
-explanations:
-
-The TeX derived source code contains many comments that came with the code when
-it was moved from "Pascal Web" to "C Web" (with web2c) to "C plus comments" (by
-Taco). These comments are mostly from Don Knuth as they were part of TeX The
-Program. However, some comments were added (or changed) in the perspective of
-eTeX, pdfTeX, Aleph, etc. We also added some in LuaTeX and LuaMetaTeX. So, in
-the meantime it's a mix. It us who made the mess, not Don! In due time I hope
-to go over all the comments and let them fit the (extended) code.
-
-dependencies:
-
-Often the files here include more h files than needed but given the speed of
-compilation that is no problem. It also helps to identify potential name clashes
-and such.
-
-legacy:
-
-Occasionally there is a file texlegacy.c that has some older (maybe reworked)
-code but I move it to another place when It gets too large and its code no
-longer can be retrofit. For me is shows a bit what got done in the (many)
-intermediate steps.
-
---------------------------------------------------------------------------------
-documentation
---------------------------------------------------------------------------------
-
-The code will be stepwise cleaned up a it (removing the web2c side effects),
-making the many branches stand out etc so that some aspects can be documented
-a bit better (in due time). All this will take time (and already quite some time
-went into it.) The official interface of LuaMetaTeX is described in the manual
-and examples of usage can be seen in ConTeXt. Of course TeX behaves as such.
-
-The organization of files, names of functions can change as we progress but when
-possible the knuthian naming is followed so that the documentation of "TeX The
-Program" still (mostly) applies. Some of the improvements in LuaMetaTeX can
-eventually trickle back into LuaTeX although we need to guard stability. The
-files here can *not* be dropped into the LuaTeX source tree!
-
---------------------------------------------------------------------------------
-reboot
---------------------------------------------------------------------------------
-
-I'll experiment with a reboot engine option but for sure that also interferes
-with a macro package initialization so it's a long term experiment. Quite
-certainly it will not pay off anyway so it might never happen. But there are
-some pending ideas so ...
-
---------------------------------------------------------------------------------
-libraries | ffi | luajit
---------------------------------------------------------------------------------
-
-We use optional libraries instead of ffi which is not supported because it is
-cpu and platform bound and the project that the code was taken from seems to
-be orphaned. Also luajit is not supported as that projects is stalled and uses
-an old lua.
-
---------------------------------------------------------------------------------
-cmake
---------------------------------------------------------------------------------
-
-We (Mojca and Hans) try to make the build as simple as possible with a minimum
-of depencies. There are some differences with respect to unix and windows (we
-support msvc, crosscompiled mingw and clang). The code of libraries that we use
-is included, apart from optional libraries. It can only get better.
-
-We really try to make all compilers happy and minimize the number of messages,
-even if that makes the code a bit less nice. It's a bit unfortunate that over
-time the demands and default change a bit (what was needed before triggers a
-warning later).
-
---------------------------------------------------------------------------------
-experiments
---------------------------------------------------------------------------------
-
-I've done quite some experiments but those that in the end didn't make sense, or
-complicated the code, or where nice but not that useful after all were simply
-deleted so that no traces are left that can clutter the codebase. I'll probably
-for get (and for sure already have forgotten) about most of them so maybe some
-day they will show up as (different) experiments. We'll see how that goes.
-
--- miniz : smaller pdf files, less code, similar performance
--- mimalloc : especially faster for the lua subsystem
-
---------------------------------------------------------------------------------
-performance
---------------------------------------------------------------------------------
-
-By now the codebase is different from the LuaTeX one and as a consequence the
-performance can also differ. But it's hard to measure in ConTeXt because much
-more has to be done in Lua and that comes at a price. The native LuaTeX backend
-is for instance much faster (last time meausred the penalty can be up to 20%).
-On the Internet one can run into complaints about performance of LuaTeX with
-other macro packages, so one might wonder why we made this move but speed is
-not everything. On the average ConTeXt has not become less efficient, or
-at least I don't see its users complain much about it, so we just moved on.
-
-The memory footprint at the engine end is somewhat smaller but of course that
-gets compensated by memory consumption at the Lua end. We also sacrifice the
-significate gain of the faster LuaJIT virtual machine (although at some point
-supporting that variant makes not much sense any more as it lacks some Lua
-features). Because, contrary to other TeX's the Lua(Meta)TeX frontend code
-is split up in separate units, compilers can probably do less optimization,
-although we use large compilations units that are mostly independent of each
-other.
-
-Eventually, in a next stage, I might be able to compentate it but don't expect
-miracles: I already explored all kind of variations. Buying a faster machine is
-always an option. Multiple cores don't help, faster memory and caching of files
-does. Already early in the LuaTeX development we found that a CPU cache matters
-but (definitely on server with many a virtual machines) there LuaMetaTeX has to
-compete.
-
-So, at this point my objective is not so much to make LuaMetaTeX run faster but
-more to make sure that it keeps the same performance, even if more functionality
-gets added to the TeX, MetaPost and/or Lua parts. Also keep in mind that in the
-end inefficient macros and styles play a bigger role that the already pretty
-fast engine.
-
---------------------------------------------------------------------------------
-rapid development cycle
---------------------------------------------------------------------------------
-
-Because I don't want to divert too much (and fast) from the way traditional TeX
-is coded, the transition is a stepwise process. This also means that much code
-that first has been abstracted and cleaned up, later goes. The extra work that
-is involved, combined with a fast test cycle with the help of ConTeXt users
-ensures that we keep a working ConTeXt although there occasionally are periods
-with issues, especially when fundamentals change or are extended. However, the
-number of temporary bugs is small compared to the number of changes and
-extensions and worth the risk. The alternative is to have long periods where we
-don't update the engine, but that makes testing the related changes in ConTeXt
-rather cumbersome. After all, the engine targets at ConTeXt. But of course it is
-kind of a pity that no one sees what steps were used to get there.
-
---------------------------------------------------------------------------------
-api
---------------------------------------------------------------------------------
-
-Although some symbols can be visible due to the fact that we maek them extern as
-past of a code splitup, there is no api at all. Don't expect the names of the
-functions and variables that this applies to to remain the same. Blame yourself
-for abusing this partial exposure. The abstraction is in the \LUA\ interface and
-when possible that one stays the same. Adding more and more access (callbacks)
-won't happen because it has an impact on performance.
-
-Because we want to stay close to original TeX in many aspects, the names of
-functions try to match those in ttp. However, because we're now in pure C, we
-have more functions (and less macros). The compiler will inline many of them,
-but plenty will show up in the symbols table, when exposed. For that reason we
-prefix all functions in categories so that they at least show up in groups. It
-is also the reason why in for instance the optional modules code we collect all
-visible locals in structs. It's all a stepwise process.
-
-The split in tex* modules is mostly for convenience. The original program is
-monolithic (you can get an idea when you look at mp.c) so in a sense they should
-all be seen as a whole. As a consequence we have tex_run_* as externals as well
-as locals. It's just an on-purpose side effect, not a matter of inconsistency:
-there is no tex api.
-
---------------------------------------------------------------------------------
-todo (ongoing)
---------------------------------------------------------------------------------
-
-- All errors and warnings (lua|tex|fatal) have to be checked; what is critital
- and what not.
-- I need to figure out why filetime differs between msvc and mingw (daylight
- correction probably).
-- Nested runtime measurement is currently not working on unix (but it works ok
- on microsoft windows).
-- I will check the manual for obsolete, removed and added functionality. This
- is an ongoing effort.
-- Eventually I might do some more cleanup of the mp*.w code. For now we keep
- w files, but who knows ...
-- A bit more reshuffling of functions to functional units is possible but that
- happens stepwise as it's easy to introduce bug(let)s. I will occasionally go
- over all code.
-- I might turn some more macros into functions (needs some reshuffling too)
- because it's nicer wrt tracing issues. When we started with LuaTeX macros
- made more sense but compilers got better. In the meantime whole program
- optimization works okay, but we cannot do that when one also wants to load
- modules.
-- A side track of the lack of stripping (see previous note) is that we need to
- namespace locals more agressive ... most is done.
-- We can clean up the dependency chain i.e. header files and such but this is
- a long term activity. It's also not that important.
-- Maybe nodememoryword vs tokenmemoryword so that the compiler can warn for a
- mixup.
-- Remove some more (also cosmetic) side effects of mp library conversion.
-- Replace some more of the print* chains by the more compact print_format call
- (no hurry with that one).
-- The naming between modules (token, tex, node) of functions is (historically)
- a bit inconsistent (getfoo, get_foo etc) so I might make that better. It does
- have some impact on compatibility but one can alias (we can provide a file).
-- Some more interface related code might get abstracted (much already done).
-- I don't mention other (either or not already rejected) ideas and experiments
- here (like pushing/popping pagebuilder states which is messy and also demands
- too much from the macro package end.)
-- Stepwise I'll make the complete split of command codes (chr) and subtypes.
- This is mostly done but there are some leftovers. It also means that we no
- longer are completely in sync with the internal original \TEX\ naming but I'll
- try to remain close.
-- The glyph and math scale features do not yet check for overflow of maxdimen
- but I'll add some more checks and/or impose some limitations on the scale
- values. We have to keep in mind that TeX itself also hapilly accepts some
- wrap around because it doesn't really crash the engine; it just can have side
- effects.
-
---------------------------------------------------------------------------------
-todo (second phase)
---------------------------------------------------------------------------------
-
-Ideally we'd like to see more local variables (like some cur_val and such) but
-it's kind of tricky because these globals are all over the place and sometimes
-get saved and restored (so that needs careful checking), and sometimes such a
-variable is expected to be set in a nested call. It also spoils the (still
-mostly original) documentation. So, some will happen, some won't. I actually
-tested some rather drastic localization and even with tripple checking there
-were side effects, so I reverted that. (We probably end up with a mix that
-shows the intention.)
-
-Anyway, there are (and will be) some changes (return values instead of accessing
-global) that give a bit less code on the one hand (and therefore look somewhat
-cleaner) but are not always more efficient. It's all a matter of taste.
-
-I'm on and off looking at the files and their internal documentation and in the
-process rename some variables, do some extra checking, and remove unused code.
-This is a bit random activity that I started doing pending the first official
-release.
-
-Now that the math engine has been partly redone the question is: should we keep
-the font related control options? They might go away at some point and even
-support for traditional eight bit fonts might be dropped. We'll see about that.
-
-That is: we saw about it. End 2021 and beginning of 2022 Mikael Sundqvist and I
-spent quite a few months on playing around with new features: more classes, inter
-atom spacing, inter atom penalties, atom rules, a few more FontParameters, a bit
-more control on top of what we already had, etc. In the end some of the control
-already present became standardized in a way that now prefers OpenType fonts.
-Persistent issues with fonts are now dealt with on a per font basis in ConteXt
-using existing as well as new tweaking features. We started talking micro math
-typography. Old fonts are still supported but one has to configure the engine
-with respecty to the used technology. Another side effect is that we now store
-math character specifications in nodes instead of a number.
-
-It makes sense to simplify delimiters (just make them a mathchar) and get rid of
-the large family and char. These next in size and extensibles are to be related
-anyway so one can always make a (runtime) virtual font. The main problem is that
-we then need to refactor some tex (format) code too becuase we no longer have
-delimiters there too.
-
---------------------------------------------------------------------------------
-dependencies
---------------------------------------------------------------------------------
-
-There are no depencies on code outside this tree and we keep it that way. If you
-follow the TeXLive (LuaTeX) source update you'll notice that there are quite
-often updates of libraries and sometimes they give (initial) issues when being
-compiled, also because there can be further dependencies on compilers as well as
-libraries specific to a (version of) an operating system. This is not something
-that users should be bothered with.
-
-Optional libraries are really optional and although an API can change we will
-not include related code in the formal LuaMetaTeX code base. We might offer some
-in the build farm (for building libraries) but that is not a formal dependency.
-We will of course adapt code to changes in API's but also never provide more
-than a minimal interface: use Lua when more is needed.
-
-We keep in sync with Lua development, also because we consider LuaMetaTeX to be
-a nice test case. We never really have issues with Lua anyway. Maybe at some
-point I will replace the socket related code. The mimalloc libraries used gives
-a performance boost but we could do without. The build cerf library might be
-replaced by an optional but it also depends on the complex datatype being more
-mature: there is now a fundamental difference between compilers so we have a
-patched version; the code doesn't change anyway, so maybe it can be stripped.
-
-In practice there have been hardly any updates to the libraries that we do use:
-most changes are in auxiliary programs and make files anyway. When there is an
-update (most are on github) this is what happens:
-
--- check out code
--- compare used subset (like /src) with working copy
--- merge with working copy if it makes sense (otherwise delay)
--- test for a while (local compilation etc.)
--- compare used subset again, this time with local repository
--- merge with local repository
--- push update to the build farm
-
-So, each change is checked twice which in practice doesn't take much time but
-gives a good idea of the kind of changes. So far we never had to roll back.
-
-We still use CWEB formatting for MetaPost which then involves a conversion to C
-code but the C code is included. This removes a depedency on the WEB toolchain.
-The Lua based converter that is part of this source tree works quite well for
-our purpose (and also gives nicer code).
-
-We don't do any architecture (CPU) or operating system specific optimizations,
-simply because there is no real gain for LuaMetaTeX. It would only introduce
-issues, a more complex build, dependencies on assembly generators, etc. which
-is a no-go.
-
---------------------------------------------------------------------------------
-team / responsibilities
---------------------------------------------------------------------------------
-
-The LuaTeX code base is part of the ConTeXt code base. That way we can guarantee
-its working with the ConTeXt macro package and also experiment as much as we
-like without harming this package. The ConTeXt code is maintained by Hans Hagen
-and Wolfgang Schuster with of course help and input from others (those who are
-on the mailing list will have no problem identifying who). Because we see the
-LuaMetaTeX code as part of that effort, starting with its more or less official
-release (version 2.05, early 2020), Hans and Wolfgang will be responsible for
-the code (knowing that we can always fall back on Taco) and explore further
-possibilities. Mojca Miklavec handles the compile farm, coordinates the
-distributions, deals with integration in TeXLive, etc. Alan Braslau is the first
-line tester so that in an early stage we can identify issues with for TeX,
-MetaPost, Lua and compilation on the different platforms that users have.
-
-If you run into problems with LuaMetaTeX, the ConTeXt mailing list is the place
-to go to: ntg-context@ntg.nl. Of course you can also communicate LuaTeX problems
-there, especially when you suspect that both engines share it, but for specific
-LuaTeX issues there is dev-luatex@ntg.nl where the LuaTeX team can help you
-further.
-
-This (mid 2018 - begin 2020) is the first stage of the development. Before we
-move on, we (read: users) will first test the current implementation more
-extensively over a longer period of time, something that is really needed because
-there are lots of accumulated changes, and I would not be surprised if subtle
-issues have been introduced. In the meantime we will discuss how to follow up.
-
-The version in the distribution is always tested with the ConteXt test suite,
-which hopefully uncovers issues before users notice.
-
-Stay tuned!
---------------------------------------------------------------------------------
-
---------------------------------------------------------------------------------
-ConTeXt websites : http://contextgarden.net http://www.pragma-ade.nl
-Development list : dev-context@ntg.nl
-Support list : context@ntg.nl
-User groups : http://ntg.nl http://tug.org etc
---------------------------------------------------------------------------------
-
---------------------------------------------------------------------------------
-Hans Hagen : j.hagen@xs4all.nl
---------------------------------------------------------------------------------
+--------------------------------------------------------------------------------
+welcome
+--------------------------------------------------------------------------------
+
+There is not much information here. I normally keep track of developments in
+articles or chapters in the history documents. These can (sometimes with a delay
+when it's an article) be found in the ConTeXt distribution. The history and
+development of LuaTeX is also documented there, often with examples or usage.
+
+The ideas behind this project are discussed in documents in the regular ConTeXt
+distribution. A short summary is: in order to make sure ConTeXt will work as
+intended, we distribute an engine with it. That way we can control stability,
+performance and features. It also permits experiments without the danger of
+interference with the engines used in other macro packages. Also, we don't want
+dependencies on large subsystems so we have a well defined set of libraries: we
+want to stay lean and mean. Eventually the same applies as to original TeX: we
+fix bugs and don't add all kind of stuff we don't (want or) need. Just that.
+
+--------------------------------------------------------------------------------
+codebase
+--------------------------------------------------------------------------------
+
+This codebase is a follow up on LuaTeX. It all started with a merge of files
+that came from the Pascal to C converter (CWEB) plus some C libraries. That code
+base evolved over time and there were the usual side effects of the translation
+and merge of (also other engine) code, plus successive extensions as well as Lua
+interfaces. In LuaMetaTeX I tried to smooth things a bit. The idea was to stay
+close to the original (which in the end is TeX itself) so that is why many
+variables, functions etc are named the way they are. Of course I reshuffled, and
+renamed but I also tried to stay close to the original naming. More work needs
+to be done to get it all right but it happens stepwise as I don't want to
+introduce bugs. In the meantime the LuaTeX and LuaMetaTeX code bases differ
+substantially but apart from some new features and stripping away backend and
+font code, the core should work the same.
+
+tex etex pdftex aleph:
+
+Of course the main body of code comes from its ancestors. We started with pdfTeX
+which has its frontend taken from standard TeX, later extended with the eTeX
+additions. Some additional features from pdfTeX were rewritten to become core
+functionality. We also took some from Aleph (Omega) but only some (in the
+meantime adapted) r2l code is left (so we're not compatible).
+
+mp:
+
+The maintenance of MetaPost was delegated to the same people who do LuaTeX and
+as a step in development a library was written. This library is used in
+LuaMetaTeX but has been adapted a bit for it. In principle some of the additions
+can be backported, but that is yet undecided.
+
+lua:
+
+This is the third major component of LuaMetaTeX. In LuaTeX a slightly patched
+version has been used but here we use an unchanged version, although the version
+number of the bytecode blob is adapted so that we can use intermediate versions
+of lua 5.4 that expect different bytecode without crashing on existing bytecode;
+this trick has been dropped but I hope at some point Lua will add a define for
+this.
+
+For the record: when we started with LuaTeX I'd gone through a pascal, modula 2,
+perl, ruby with respect to the management helpers for ConTeXt, like dealing with
+indexes, managing metapost subruns, and all kind of goodies that evolved over time.
+I ran into Lua in the SciTE editor and the language and the concept of a small and
+efficient embedded language. The language originates in academia and is not under
+the influence of (company and commercial driven) marketing. A lot of effort goes
+into stepwise evolution. The authors are clear about the way they work on the
+language:
+
+ http://lua-users.org/lists/lua-l/2008-06/msg00407.html
+
+which fits nicely in our philosophy. Just in case one wonders if other scripting
+languages were considered the answer is: no, they were not. The alternatives all
+are large and growing and come with large ecosystems (read: dependencies) and some
+had (seemingly) drastic changes in the design over time. Of course Lua also evolves
+but that is easy to deal with. And in the meantime also the performance of Lua made
+it clear that it was the right choice.
+
+avl:
+
+This library has been in use in the backend code of LuaTeX but is currently only
+used in the MP library. I'm not sure to what extent this (originally meant for
+Python) module has been adapted for pdfTeX/LuaTeX but afaik it has been stable
+for a long time. It won't be updated but I might adapt it for instance wrt error
+messages so that it fits in.
+
+decnumber:
+
+This is used in one of the additional number models that the mp library supports.
+In LuaMetaTeX there is no support for the binary model. No one uses it and it
+would add quite a bit to the codebase.
+
+hnj:
+
+This GPL licensed module is used in the hyphenation machinery. It has been
+slightly adapted so that error messages and such fit in. I don't expect it to
+change much in the future.
+
+pplib:
+
+This library is made for Lua(Meta)TeX and provides an efficient PDF parser in
+pure C. In LuaTeX it was introduced as a replacement for a larger library that
+was overkill for our purpose, depended on C++ and kept changing. This library
+itself uses libraries but that code is shipped with it. We use some of that
+for additional Lua modules (like md5, sha2 and decoding).
+
+lz4 | lzo | zstd:
+
+For years this library was in the code base and even interfaced but not enabled
+by default. When I played with zstd support as optional library I decided that
+these two should move out of the code base and also be done the optional way. The
+amount of code was not that large, but the binary grew by some 10%. I also played
+with the foreign module and zstd and there is no real difference in performance. The
+optionals are actually always enabled, but foreign is controlled by the command
+line option that enables loading libraries, and it also depends on libffi.
+
+zlib | miniz:
+
+I started with the code taken from LuaTeX, which itself was a copy that saw some
+adaptations over time (iirc there were border case issues, like dealing with zero
+length streams and so). It doesn't change so in due time I might strip away some
+unused code. For a while libdeflate was used but because pplib also depends on
+zlib and because libdeflate doesn't do streams that was abandoned (it might come
+back as it is very nice and clean code.). One issue with (de)compression libraries
+is that they use tricks that can be architecture dependent and we stay away from
+that. I try to stay away from those and prefer to let the compiler sort things out.
+
+Early 2021 we switched to miniz. One reason is that the codebase is smaller because
+it doesn't deal with very old or rare platforms and architectures. Its performance
+is comparable, definitely for our purpose, and sometimes even a bit better. I looked
+at other alternatives but as soon as processor specific tricks are used, we end up
+with architecture specific header files and code so it's a no-go for a presumed
+long term stable and easy to compile program like luametatex. There is no gain in it
+anyway.
+
+complex:
+
+There is a complex number interface inspired by the complex number lua module by
+lhf. It also wraps libcerf usage.
+
+lfs:
+
+In LuaTeX we use a patched version of this library. In LuaMetaTeX I rewrote the
+code because too many patches were needed to deal with mswindows properly.
+
+socket:
+
+The core library is used. The library is seldom adapted but I keep an eye on it.
+We used to have a patched version in LuaTeX, but here we stay closer. I might
+eventually do some rewrite (less code) or decide to make it an external library.
+The related Lua code is not in the binary and context uses its own (derived)
+variant so that it uses our helpers as well as fits in the reporting system. I
+need to keep an eye on improvements upstream. We also need to keep an eye on
+copas as we use that code in context.
+
+luasec:
+
+This is not used but here as a reference for a possible future use (maybe as
+library).
+
+curl, ghostscript, graphicmagick, zint, mujs, mysql, postgress, sqlite, ...:
+
+The optional module mechanism supports some external libraries but we don't keep
+their code in the luametatex codebase. We might come up with a separate source
+tree for that, but only for some smaller ones. The large ones, those depending
+on other libraries, or c++, or whatever resources, will just be taken from the
+system.
+
+libcerf:
+
+This library might become external but is now in use as a plug into the complex
+number support that itself is meant for MetaPost use. The code here has been
+adapted to support the Microsoft compiler. I will keep an eye on what happens
+upstream and I reconsider matters later. (There is no real need to bloat the
+LuaMetaTeX binary with something that is rarely used.)
+
+kpse:
+
+There is optional library support for the KPSE library used in WEB2C. Although
+it does provide the methods that make sense, it is not meant for usage in
+ConTeXt, but more as a toolkit to identify issues and conflicts with parallel
+installations like TeXLive.
+
+hb:
+
+I have a module that works the same as the ffi variant from a couple of years
+ago and I might add it when it's needed (for oriental tex font development
+checking purposes, but then I also need to cleanup and add some test styles for
+that purpose). Looking at the many LuaTeX subversion checkins it looks a bit
+like a moving target. It's also written in C++ which we don't (want to) use in
+LuaMetaTeX. But the library comes with other programs so it's likely you can
+find it on your system someplace.
+
+general:
+
+It's really nice to see all these libraries popping up on the web but in the
+perspective of something like TeX one should be careful. Quite often what is hip
+today is old fashioned tomorrow. And quite often the selling point of the new
+thing comes with bashing the old, which can be a sign of something being a
+temporary thing or itself something to be superseded soon. Now, get me right:
+TeX in itself is great, and so are successors. In that sense LuaMetaTeX is just
+a follow up with no claims made for it being better. It just makes things easier
+for ConTeXt. You can kick in libraries but be aware of the fact that they can
+change, so if you have long running projects, make sure you save them. Or run a
+virtual machine that can last forever. TeX systems can run for ages that way. We
+might eventually add support for generating libs to the compile farm. The older
+a library gets, the bigger the chance that its api is stable. Compression
+libraries are great examples, while libraries that deal with images, conversion
+and rendering are more moving (and have way more dependencies too). Actually,
+for the latter category, in ConTeXt we prefer to call the command line variants
+instead of using libraries, also because it seldom influences performance.
+
+licenses:
+
+Most files contain some notice about the license and most are quite liberal.
+I had to add some (notes) that were missing from LuaTeX. There is an occasional
+readme file that tells a bit more.
+
+explanations:
+
+The TeX derived source code contains many comments that came with the code when
+it was moved from "Pascal Web" to "C Web" (with web2c) to "C plus comments" (by
+Taco). These comments are mostly from Don Knuth as they were part of TeX The
+Program. However, some comments were added (or changed) in the perspective of
+eTeX, pdfTeX, Aleph, etc. We also added some in LuaTeX and LuaMetaTeX. So, in
+the meantime it's a mix. It is us who made the mess, not Don! In due time I hope
+to go over all the comments and let them fit the (extended) code.
+
+dependencies:
+
+Often the files here include more h files than needed but given the speed of
+compilation that is no problem. It also helps to identify potential name clashes
+and such.
+
+legacy:
+
+Occasionally there is a file texlegacy.c that has some older (maybe reworked)
+code but I move it to another place when it gets too large and its code no
+longer can be retrofit. For me it shows a bit what got done in the (many)
+intermediate steps.
+
+--------------------------------------------------------------------------------
+documentation
+--------------------------------------------------------------------------------
+
+The code will be stepwise cleaned up a bit (removing the web2c side effects),
+making the many branches stand out etc so that some aspects can be documented
+a bit better (in due time). All this will take time (and already quite some time
+went into it.) The official interface of LuaMetaTeX is described in the manual
+and examples of usage can be seen in ConTeXt. Of course TeX behaves as such.
+
+The organization of files, names of functions can change as we progress but when
+possible the knuthian naming is followed so that the documentation of "TeX The
+Program" still (mostly) applies. Some of the improvements in LuaMetaTeX can
+eventually trickle back into LuaTeX although we need to guard stability. The
+files here can *not* be dropped into the LuaTeX source tree!
+
+--------------------------------------------------------------------------------
+reboot
+--------------------------------------------------------------------------------
+
+I'll experiment with a reboot engine option but for sure that also interferes
+with a macro package initialization so it's a long term experiment. Quite
+certainly it will not pay off anyway so it might never happen. But there are
+some pending ideas so ...
+
+--------------------------------------------------------------------------------
+libraries | ffi | luajit
+--------------------------------------------------------------------------------
+
+We use optional libraries instead of ffi which is not supported because it is
+cpu and platform bound and the project that the code was taken from seems to
+be orphaned. Also luajit is not supported as that project is stalled and uses
+an old lua.
+
+--------------------------------------------------------------------------------
+cmake
+--------------------------------------------------------------------------------
+
+We (Mojca and Hans) try to make the build as simple as possible with a minimum
+of dependencies. There are some differences with respect to unix and windows (we
+support msvc, crosscompiled mingw and clang). The code of libraries that we use
+is included, apart from optional libraries. It can only get better.
+
+We really try to make all compilers happy and minimize the number of messages,
+even if that makes the code a bit less nice. It's a bit unfortunate that over
+time the demands and defaults change a bit (what was needed before triggers a
+warning later).
+
+--------------------------------------------------------------------------------
+experiments
+--------------------------------------------------------------------------------
+
+I've done quite some experiments but those that in the end didn't make sense, or
+complicated the code, or where nice but not that useful after all were simply
+deleted so that no traces are left that can clutter the codebase. I'll probably
+forget (and for sure already have forgotten) about most of them so maybe some
+day they will show up as (different) experiments. We'll see how that goes.
+
+-- miniz : smaller pdf files, less code, similar performance
+-- mimalloc : especially faster for the lua subsystem
+
+--------------------------------------------------------------------------------
+performance
+--------------------------------------------------------------------------------
+
+By now the codebase is different from the LuaTeX one and as a consequence the
+performance can also differ. But it's hard to measure in ConTeXt because much
+more has to be done in Lua and that comes at a price. The native LuaTeX backend
+is for instance much faster (last time measured the penalty can be up to 20%).
+On the Internet one can run into complaints about performance of LuaTeX with
+other macro packages, so one might wonder why we made this move but speed is
+not everything. On the average ConTeXt has not become less efficient, or
+at least I don't see its users complain much about it, so we just moved on.
+
+The memory footprint at the engine end is somewhat smaller but of course that
+gets compensated by memory consumption at the Lua end. We also sacrifice the
+significant gain of the faster LuaJIT virtual machine (although at some point
+supporting that variant makes not much sense any more as it lacks some Lua
+features). Because, contrary to other TeX's the Lua(Meta)TeX frontend code
+is split up in separate units, compilers can probably do less optimization,
+although we use large compilations units that are mostly independent of each
+other.
+
+Eventually, in a next stage, I might be able to compensate it but don't expect
+miracles: I already explored all kind of variations. Buying a faster machine is
+always an option. Multiple cores don't help, faster memory and caching of files
+does. Already early in the LuaTeX development we found that a CPU cache matters
+but (definitely on servers with many virtual machines) there LuaMetaTeX has to
+compete.
+
+So, at this point my objective is not so much to make LuaMetaTeX run faster but
+more to make sure that it keeps the same performance, even if more functionality
+gets added to the TeX, MetaPost and/or Lua parts. Also keep in mind that in the
+end inefficient macros and styles play a bigger role than the already pretty
+fast engine.
+
+--------------------------------------------------------------------------------
+rapid development cycle
+--------------------------------------------------------------------------------
+
+Because I don't want to divert too much (and fast) from the way traditional TeX
+is coded, the transition is a stepwise process. This also means that much code
+that first has been abstracted and cleaned up, later goes. The extra work that
+is involved, combined with a fast test cycle with the help of ConTeXt users
+ensures that we keep a working ConTeXt although there occasionally are periods
+with issues, especially when fundamentals change or are extended. However, the
+number of temporary bugs is small compared to the number of changes and
+extensions and worth the risk. The alternative is to have long periods where we
+don't update the engine, but that makes testing the related changes in ConTeXt
+rather cumbersome. After all, the engine targets at ConTeXt. But of course it is
+kind of a pity that no one sees what steps were used to get there.
+
+--------------------------------------------------------------------------------
+api
+--------------------------------------------------------------------------------
+
+Although some symbols can be visible due to the fact that we make them extern as
+part of a code splitup, there is no api at all. Don't expect the names of the
+functions and variables that this applies to to remain the same. Blame yourself
+for abusing this partial exposure. The abstraction is in the \LUA\ interface and
+when possible that one stays the same. Adding more and more access (callbacks)
+won't happen because it has an impact on performance.
+
+Because we want to stay close to original TeX in many aspects, the names of
+functions try to match those in ttp. However, because we're now in pure C, we
+have more functions (and less macros). The compiler will inline many of them,
+but plenty will show up in the symbols table, when exposed. For that reason we
+prefix all functions in categories so that they at least show up in groups. It
+is also the reason why in for instance the optional modules code we collect all
+visible locals in structs. It's all a stepwise process.
+
+The split in tex* modules is mostly for convenience. The original program is
+monolithic (you can get an idea when you look at mp.c) so in a sense they should
+all be seen as a whole. As a consequence we have tex_run_* as externals as well
+as locals. It's just an on-purpose side effect, not a matter of inconsistency:
+there is no tex api.
+
+--------------------------------------------------------------------------------
+todo (ongoing)
+--------------------------------------------------------------------------------
+
+- All errors and warnings (lua|tex|fatal) have to be checked; what is critical
+ and what not.
+- I need to figure out why filetime differs between msvc and mingw (daylight
+ correction probably).
+- Nested runtime measurement is currently not working on unix (but it works ok
+ on microsoft windows).
+- I will check the manual for obsolete, removed and added functionality. This
+ is an ongoing effort.
+- Eventually I might do some more cleanup of the mp*.w code. For now we keep
+ w files, but who knows ...
+- A bit more reshuffling of functions to functional units is possible but that
+ happens stepwise as it's easy to introduce bug(let)s. I will occasionally go
+ over all code.
+- I might turn some more macros into functions (needs some reshuffling too)
+ because it's nicer wrt tracing issues. When we started with LuaTeX macros
+ made more sense but compilers got better. In the meantime whole program
+ optimization works okay, but we cannot do that when one also wants to load
+ modules.
+- A side track of the lack of stripping (see previous note) is that we need to
+ namespace locals more aggressively ... most is done.
+- We can clean up the dependency chain i.e. header files and such but this is
+ a long term activity. It's also not that important.
+- Maybe nodememoryword vs tokenmemoryword so that the compiler can warn for a
+ mixup.
+- Remove some more (also cosmetic) side effects of mp library conversion.
+- Replace some more of the print* chains by the more compact print_format call
+ (no hurry with that one).
+- The naming between modules (token, tex, node) of functions is (historically)
+ a bit inconsistent (getfoo, get_foo etc) so I might make that better. It does
+ have some impact on compatibility but one can alias (we can provide a file).
+- Some more interface related code might get abstracted (much already done).
+- I don't mention other (either or not already rejected) ideas and experiments
+ here (like pushing/popping pagebuilder states which is messy and also demands
+ too much from the macro package end.)
+- Stepwise I'll make the complete split of command codes (chr) and subtypes.
+ This is mostly done but there are some leftovers. It also means that we no
+ longer are completely in sync with the internal original \TEX\ naming but I'll
+ try to remain close.
+- The glyph and math scale features do not yet check for overflow of maxdimen
+ but I'll add some more checks and/or impose some limitations on the scale
+ values. We have to keep in mind that TeX itself also happily accepts some
+ wrap around because it doesn't really crash the engine; it just can have side
+ effects.
+
+--------------------------------------------------------------------------------
+todo (second phase)
+--------------------------------------------------------------------------------
+
+Ideally we'd like to see more local variables (like some cur_val and such) but
+it's kind of tricky because these globals are all over the place and sometimes
+get saved and restored (so that needs careful checking), and sometimes such a
+variable is expected to be set in a nested call. It also spoils the (still
+mostly original) documentation. So, some will happen, some won't. I actually
+tested some rather drastic localization and even with triple checking there
+were side effects, so I reverted that. (We probably end up with a mix that
+shows the intention.)
+
+Anyway, there are (and will be) some changes (return values instead of accessing
+global) that give a bit less code on the one hand (and therefore look somewhat
+cleaner) but are not always more efficient. It's all a matter of taste.
+
+I'm on and off looking at the files and their internal documentation and in the
+process rename some variables, do some extra checking, and remove unused code.
+This is a bit random activity that I started doing pending the first official
+release.
+
+Now that the math engine has been partly redone the question is: should we keep
+the font related control options? They might go away at some point and even
+support for traditional eight bit fonts might be dropped. We'll see about that.
+
+That is: we saw about it. End 2021 and beginning of 2022 Mikael Sundqvist and I
+spent quite a few months on playing around with new features: more classes, inter
+atom spacing, inter atom penalties, atom rules, a few more FontParameters, a bit
+more control on top of what we already had, etc. In the end some of the control
+already present became standardized in a way that now prefers OpenType fonts.
+Persistent issues with fonts are now dealt with on a per font basis in ConTeXt
+using existing as well as new tweaking features. We started talking micro math
+typography. Old fonts are still supported but one has to configure the engine
+with respect to the used technology. Another side effect is that we now store
+math character specifications in nodes instead of a number.
+
+It makes sense to simplify delimiters (just make them a mathchar) and get rid of
+the large family and char. These next in size and extensibles are to be related
+anyway so one can always make a (runtime) virtual font. The main problem is that
+we then need to refactor some tex (format) code too because we no longer have
+delimiters there too.
+
+--------------------------------------------------------------------------------
+dependencies
+--------------------------------------------------------------------------------
+
+There are no dependencies on code outside this tree and we keep it that way. If you
+follow the TeXLive (LuaTeX) source update you'll notice that there are quite
+often updates of libraries and sometimes they give (initial) issues when being
+compiled, also because there can be further dependencies on compilers as well as
+libraries specific to a (version of) an operating system. This is not something
+that users should be bothered with.
+
+Optional libraries are really optional and although an API can change we will
+not include related code in the formal LuaMetaTeX code base. We might offer some
+in the build farm (for building libraries) but that is not a formal dependency.
+We will of course adapt code to changes in API's but also never provide more
+than a minimal interface: use Lua when more is needed.
+
+We keep in sync with Lua development, also because we consider LuaMetaTeX to be
+a nice test case. We never really have issues with Lua anyway. Maybe at some
+point I will replace the socket related code. The mimalloc libraries used gives
+a performance boost but we could do without. The build cerf library might be
+replaced by an optional but it also depends on the complex datatype being more
+mature: there is now a fundamental difference between compilers so we have a
+patched version; the code doesn't change anyway, so maybe it can be stripped.
+
+In practice there have been hardly any updates to the libraries that we do use:
+most changes are in auxiliary programs and make files anyway. When there is an
+update (most are on github) this is what happens:
+
+-- check out code
+-- compare used subset (like /src) with working copy
+-- merge with working copy if it makes sense (otherwise delay)
+-- test for a while (local compilation etc.)
+-- compare used subset again, this time with local repository
+-- merge with local repository
+-- push update to the build farm
+
+So, each change is checked twice which in practice doesn't take much time but
+gives a good idea of the kind of changes. So far we never had to roll back.
+
+We still use CWEB formatting for MetaPost which then involves a conversion to C
+code but the C code is included. This removes a dependency on the WEB toolchain.
+The Lua based converter that is part of this source tree works quite well for
+our purpose (and also gives nicer code).
+
+We don't do any architecture (CPU) or operating system specific optimizations,
+simply because there is no real gain for LuaMetaTeX. It would only introduce
+issues, a more complex build, dependencies on assembly generators, etc. which
+is a no-go.
+
+--------------------------------------------------------------------------------
+team / responsibilities
+--------------------------------------------------------------------------------
+
+The LuaMetaTeX code base is part of the ConTeXt code base. That way we can guarantee
+its working with the ConTeXt macro package and also experiment as much as we
+like without harming this package. The ConTeXt code is maintained by Hans Hagen
+and Wolfgang Schuster with of course help and input from others (those who are
+on the mailing list will have no problem identifying who). Because we see the
+LuaMetaTeX code as part of that effort, starting with its more or less official
+release (version 2.05, early 2020), Hans and Wolfgang will be responsible for
+the code (knowing that we can always fall back on Taco) and explore further
+possibilities. Mojca Miklavec handles the compile farm, coordinates the
+distributions, deals with integration in TeXLive, etc. Alan Braslau is the first
+line tester so that in an early stage we can identify issues with TeX,
+MetaPost, Lua and compilation on the different platforms that users have.
+
+If you run into problems with LuaMetaTeX, the ConTeXt mailing list is the place
+to go to: ntg-context@ntg.nl. Of course you can also communicate LuaTeX problems
+there, especially when you suspect that both engines share it, but for specific
+LuaTeX issues there is dev-luatex@ntg.nl where the LuaTeX team can help you
+further.
+
+This (mid 2018 - begin 2020) is the first stage of the development. Before we
+move on, we (read: users) will first test the current implementation more
+extensively over a longer period of time, something that is really needed because
+there are lots of accumulated changes, and I would not be surprised if subtle
+issues have been introduced. In the meantime we will discuss how to follow up.
+
+The version in the distribution is always tested with the ConTeXt test suite,
+which hopefully uncovers issues before users notice.
+
+Stay tuned!
+--------------------------------------------------------------------------------
+
+--------------------------------------------------------------------------------
+ConTeXt websites : http://contextgarden.net http://www.pragma-ade.nl
+Development list : dev-context@ntg.nl
+Support list : context@ntg.nl
+User groups : http://ntg.nl http://tug.org etc
+--------------------------------------------------------------------------------
+
+--------------------------------------------------------------------------------
+Hans Hagen : j.hagen@xs4all.nl
+--------------------------------------------------------------------------------
diff --git a/source/luametatex/source/tex/texadjust.c b/source/luametatex/source/tex/texadjust.c
index 11d2da6ad..775fd546d 100644
--- a/source/luametatex/source/tex/texadjust.c
+++ b/source/luametatex/source/tex/texadjust.c
@@ -161,7 +161,7 @@ void tex_run_vadjust(void)
void tex_finish_vadjust_group(void)
{
- if (! tex_wrapped_up_paragraph(vadjust_par_context)) {
+ if (! tex_wrapped_up_paragraph(vadjust_par_context, 0)) {
halfword box, adjust, target; /*tex for short-term use */
tex_end_paragraph(vadjust_group, vadjust_par_context);
tex_unsave();
diff --git a/source/luametatex/source/tex/texalign.c b/source/luametatex/source/tex/texalign.c
index 7a1045fea..e82a9eaae 100644
--- a/source/luametatex/source/tex/texalign.c
+++ b/source/luametatex/source/tex/texalign.c
@@ -837,7 +837,7 @@ static int tex_aux_nested_no_align(void)
void tex_finish_no_alignment_group(void)
{
- if (! tex_wrapped_up_paragraph(no_align_par_context)) { /* needs testing */
+ if (! tex_wrapped_up_paragraph(no_align_par_context, 0)) { /* needs testing */
tex_end_paragraph(no_align_group, no_align_par_context);
tex_aux_trace_no_align("leaving");
--lmt_alignment_state.no_align_level;
@@ -2091,7 +2091,7 @@ void tex_run_alignment_end_template(void)
} else if (lmt_input_state.input_stack[lmt_input_state.base_ptr].state != token_list_state) {
tex_alignment_interwoven_error(4);
} else if (cur_group == align_group) {
- if (! tex_wrapped_up_paragraph(align_par_context)) { /* needs testing */
+ if (! tex_wrapped_up_paragraph(align_par_context, 0)) { /* needs testing */
tex_end_paragraph(align_group, align_par_context);
if (tex_aux_finish_column()) {
tex_aux_finish_row();
diff --git a/source/luametatex/source/tex/texbuildpage.c b/source/luametatex/source/tex/texbuildpage.c
index be75042eb..5079e926e 100644
--- a/source/luametatex/source/tex/texbuildpage.c
+++ b/source/luametatex/source/tex/texbuildpage.c
@@ -363,6 +363,7 @@ void tex_build_page(void)
do {
halfword current = node_next(contribute_head);
halfword type = node_type(current);
+ halfword subtype = node_subtype(current);
/*tex Update the values of |last_glue|, |last_penalty|, and |last_kern|. */
if (lmt_page_builder_state.last_glue != max_halfword) {
tex_flush_node(lmt_page_builder_state.last_glue);
@@ -372,7 +373,7 @@ void tex_build_page(void)
lmt_page_builder_state.last_kern = 0;
lmt_page_builder_state.last_boundary = 0;
lmt_page_builder_state.last_node_type = type;
- lmt_page_builder_state.last_node_subtype = node_subtype(current);
+ lmt_page_builder_state.last_node_subtype = subtype;
lmt_page_builder_state.last_extra_used = 0;
switch (type) {
case glue_node:
@@ -385,7 +386,9 @@ void tex_build_page(void)
lmt_page_builder_state.last_kern = kern_amount(current);
break;
case boundary_node:
- lmt_page_builder_state.last_boundary = boundary_data(current);
+ if (subtype == page_boundary) {
+ lmt_page_builder_state.last_boundary = boundary_data(current);
+ }
break;
}
/*tex
@@ -725,9 +728,9 @@ void tex_build_page(void)
}
}
if (badness >= awful_bad) {
- criterium = badness;
+ criterium = badness; /* trigger fireup */
} else if (penalty <= eject_penalty) {
- criterium = penalty;
+ criterium = penalty; /* trigger fireup */
} else if (badness < infinite_bad) {
criterium = badness + penalty + lmt_page_builder_state.insert_penalties;
} else {
diff --git a/source/luametatex/source/tex/texcommands.c b/source/luametatex/source/tex/texcommands.c
index 3529f24d2..ddc57f0bd 100644
--- a/source/luametatex/source/tex/texcommands.c
+++ b/source/luametatex/source/tex/texcommands.c
@@ -87,10 +87,11 @@ void tex_initialize_commands(void)
tex_primitive(tex_command, "tabskip", internal_glue_cmd, tab_skip_code, internal_glue_base);
tex_primitive(tex_command, "spaceskip", internal_glue_cmd, space_skip_code, internal_glue_base);
tex_primitive(tex_command, "xspaceskip", internal_glue_cmd, xspace_skip_code, internal_glue_base);
- tex_primitive(tex_command, "parfillleftskip", internal_glue_cmd, par_fill_left_skip_code, internal_glue_base);
- tex_primitive(tex_command, "parfillskip", internal_glue_cmd, par_fill_right_skip_code, internal_glue_base);
- tex_primitive(tex_command, "parinitleftskip", internal_glue_cmd, par_init_left_skip_code, internal_glue_base);
- tex_primitive(tex_command, "parinitrightskip", internal_glue_cmd, par_init_right_skip_code, internal_glue_base);
+ tex_primitive(tex_command, "parfillskip", internal_glue_cmd, par_fill_right_skip_code, internal_glue_base); /*tex This is more like an alias now. */
+ tex_primitive(luatex_command, "parfillleftskip", internal_glue_cmd, par_fill_left_skip_code, internal_glue_base);
+ tex_primitive(luatex_command, "parfillrightskip", internal_glue_cmd, par_fill_right_skip_code, internal_glue_base);
+ tex_primitive(luatex_command, "parinitleftskip", internal_glue_cmd, par_init_left_skip_code, internal_glue_base);
+ tex_primitive(luatex_command, "parinitrightskip", internal_glue_cmd, par_init_right_skip_code, internal_glue_base);
tex_primitive(luatex_command, "mathsurroundskip", internal_glue_cmd, math_skip_code, internal_glue_base);
tex_primitive(luatex_command, "maththreshold", internal_glue_cmd, math_threshold_code, internal_glue_base);
@@ -164,6 +165,8 @@ void tex_initialize_commands(void)
tex_primitive(tex_command, "predisplaypenalty", internal_int_cmd, pre_display_penalty_code, internal_int_base);
tex_primitive(luatex_command, "postinlinepenalty", internal_int_cmd, post_inline_penalty_code, internal_int_base);
tex_primitive(luatex_command, "preinlinepenalty", internal_int_cmd, pre_inline_penalty_code, internal_int_base);
+ tex_primitive(luatex_command, "postshortinlinepenalty", internal_int_cmd, post_short_inline_penalty_code, internal_int_base);
+ tex_primitive(luatex_command, "preshortinlinepenalty", internal_int_cmd, pre_short_inline_penalty_code, internal_int_base);
tex_primitive(tex_command, "pretolerance", internal_int_cmd, pre_tolerance_code, internal_int_base);
tex_primitive(tex_command, "relpenalty", internal_int_cmd, post_relation_penalty_code, internal_int_base); /*tex For old times sake. */
tex_primitive(tex_command, "righthyphenmin", internal_int_cmd, right_hyphen_min_code, internal_int_base);
@@ -344,7 +347,9 @@ void tex_initialize_commands(void)
tex_primitive(luatex_command, "pageboundary", boundary_cmd, page_boundary, 0);
/* tex_primitive(luatex_command, "parboundary", boundary_cmd, par_boundary, 0); */
- tex_primitive(tex_command, "penalty", penalty_cmd, normal_code, 0);
+ tex_primitive(tex_command, "penalty", penalty_cmd, normal_penalty_code, 0);
+ tex_primitive(luatex_command, "hpenalty", penalty_cmd, h_penalty_code, 0);
+ tex_primitive(luatex_command, "vpenalty", penalty_cmd, v_penalty_code, 0);
tex_primitive(tex_command, "char", char_number_cmd, char_number_code, 0);
tex_primitive(luatex_command, "glyph", char_number_cmd, glyph_number_code, 0);
@@ -659,7 +664,6 @@ void tex_initialize_commands(void)
tex_primitive(luatex_command, "meaningasis", convert_cmd, meaning_asis_code, 0); /* for manuals and articles */
tex_primitive(luatex_command, "meaningful", convert_cmd, meaning_ful_code, 0); /* full as in fil */
tex_primitive(luatex_command, "meaningles", convert_cmd, meaning_les_code, 0); /* less as in fil, can't be less than this */
- /*tex Maybe some day also |meaningonly| (no macro: in front). */
tex_primitive(tex_command, "number", convert_cmd, number_code, 0);
tex_primitive(luatex_command, "tointeger", convert_cmd, to_integer_code, 0);
tex_primitive(luatex_command, "tohexadecimal", convert_cmd, to_hexadecimal_code, 0);
@@ -675,6 +679,7 @@ void tex_initialize_commands(void)
tex_primitive(luatex_command, "csactive", convert_cmd, cs_active_code, 0);
/* tex_primitive(luatex_command, "csnamestring", convert_cmd, cs_lastname_code, 0); */
tex_primitive(luatex_command, "detokenized", convert_cmd, detokenized_code, 0);
+ tex_primitive(luatex_command, "detokened", convert_cmd, detokened_code, 0);
tex_primitive(luatex_command, "expanded", convert_cmd, expanded_code, 0);
tex_primitive(luatex_command, "semiexpanded", convert_cmd, semi_expanded_code, 0);
tex_primitive(luatex_command, "formatname", convert_cmd, format_name_code, 0);
@@ -713,6 +718,9 @@ void tex_initialize_commands(void)
tex_primitive(luatex_command, "ifincsname", if_test_cmd, if_in_csname_code, 0); /* This is obsolete and might be dropped. */
tex_primitive(luatex_command, "ifabsnum", if_test_cmd, if_abs_int_code, 0);
tex_primitive(luatex_command, "ifabsdim", if_test_cmd, if_abs_dim_code, 0);
+ tex_primitive(luatex_command, "ifintervalnum", if_test_cmd, if_interval_int_code, 0); /* playground */
+ tex_primitive(luatex_command, "ifintervaldim", if_test_cmd, if_interval_dim_code, 0); /* playground */
+ tex_primitive(luatex_command, "ifintervalfloat", if_test_cmd, if_interval_posit_code, 0); /* playground */
tex_primitive(luatex_command, "iffloat", if_test_cmd, if_posit_code, 0);
tex_primitive(luatex_command, "ifabsfloat", if_test_cmd, if_abs_posit_code, 0);
tex_primitive(luatex_command, "ifzeronum", if_test_cmd, if_zero_int_code, 0);
@@ -832,8 +840,8 @@ void tex_initialize_commands(void)
tex_primitive(luatex_command, "hjcode", hyphenation_cmd, hjcode_code, 0);
tex_primitive(tex_command, "kern", kern_cmd, normal_kern_code, 0);
- /* tex_primitive(tex_command, "hkern", kern_cmd, h_kern_code, 0); */
- /* tex_primitive(tex_command, "vkern", kern_cmd, v_kern_code, 0); */
+ tex_primitive(tex_command, "hkern", kern_cmd, h_kern_code, 0);
+ tex_primitive(tex_command, "vkern", kern_cmd, v_kern_code, 0);
/* tex_primitive(tex_command, "nonzerowidthkern", kern_cmd, non_zero_width_kern_code, 0); */ /* maybe */
tex_primitive(luatex_command, "localleftbox", local_box_cmd, local_left_box_code, 0);
@@ -1026,7 +1034,6 @@ void tex_initialize_commands(void)
tex_primitive(tex_command, "muskipdef", shorthand_def_cmd, mu_skip_def_code, 0);
tex_primitive(tex_command, "skipdef", shorthand_def_cmd, skip_def_code, 0);
tex_primitive(tex_command, "toksdef", shorthand_def_cmd, toks_def_code, 0);
- /* tex_primitive(luatex_command, "stringdef", shorthand_def_cmd, string_def_code, 0); */
tex_primitive(luatex_command, "Umathchardef", shorthand_def_cmd, math_xchar_def_code, 0);
tex_primitive(luatex_command, "Umathdictdef", shorthand_def_cmd, math_dchar_def_code, 0);
tex_primitive(luatex_command, "attributedef", shorthand_def_cmd, attribute_def_code, 0);
diff --git a/source/luametatex/source/tex/texcommands.h b/source/luametatex/source/tex/texcommands.h
index d90456f25..372066049 100644
--- a/source/luametatex/source/tex/texcommands.h
+++ b/source/luametatex/source/tex/texcommands.h
@@ -55,6 +55,7 @@
*/
/*tex
+
Some commands are shared, for instance |car_ret_cmd| is never seen in a token list so it can be
used for signaling a parameter: |out_param_cmd| in a macro body. These constants relate to the
21 bit shifting in token properties!
@@ -89,6 +90,8 @@
easier to extend alignments when we're at it because it brings some code and logic together (of
course the principles are the same, but there can be slight differences in the way errors are
reported).
+
+ Comment: experimental |string_cmd| has been removed, as we now have |\constant| flagged macros.
*/
@@ -230,7 +233,6 @@ typedef enum tex_command_code {
mathspec_cmd,
fontspec_cmd,
register_cmd, /*tex internal register (|\count|, |\dimen|, etc.) */
- /* string_cmd, */ /*tex discarded experiment but maybe ... */
combine_toks_cmd, /*tex the |toksapp| and similar token (list) combiners */
/*tex
That was the last command that could follow |\the|.
@@ -260,7 +262,6 @@ typedef enum tex_command_code {
convert_cmd, /*tex convert to text (|\number|, |\string|, etc.) */
the_cmd, /*tex expand an internal quantity (|\the| or |\unexpanded|, |\detokenize|) */
get_mark_cmd, /*tex inserted mark (|\topmark|, etc.) */
- /* string_cmd, */
/*tex
These refer to macros. We might at some point promote the tolerant ones to have their own
cmd codes. Protected macros were done with an initial token signaling that property but
@@ -459,6 +460,7 @@ typedef enum convert_codes {
cs_active_code, /*tex command code for |\csactive| */
/* cs_lastname_code, */ /*tex command code for |\cslastname| */
detokenized_code, /*tex command code for |\detokenized| */
+ detokened_code, /*tex command code for |\detokened| */
roman_numeral_code, /*tex command code for |\romannumeral| */
meaning_code, /*tex command code for |\meaning| */
meaning_full_code, /*tex command code for |\meaningfull| */
@@ -702,7 +704,6 @@ typedef enum shorthand_def_codes {
skip_def_code, /*tex |\skipdef| */
mu_skip_def_code, /*tex |\muskipdef| */
toks_def_code, /*tex |\toksdef| */
- /* string_def_code, */
lua_def_code, /*tex |\luadef| */
integer_def_code,
posit_def_code,
@@ -1199,13 +1200,21 @@ typedef enum remove_item_codes {
typedef enum kern_codes {
normal_kern_code,
- h_kern_code, /* maybe */
- v_kern_code, /* maybe */
+ h_kern_code,
+ v_kern_code,
non_zero_width_kern_code, /* maybe */
} kern_codes;
# define last_kern_code normal_kern_code
+typedef enum penalty_codes {
+ normal_penalty_code,
+ h_penalty_code,
+ v_penalty_code,
+} penalty_codes;
+
+# define last_penalty_code normal_penalty_code
+
typedef enum tex_mskip_codes {
normal_mskip_code,
atom_mskip_code,
diff --git a/source/luametatex/source/tex/texconditional.c b/source/luametatex/source/tex/texconditional.c
index b2219e2ab..b4541f342 100644
--- a/source/luametatex/source/tex/texconditional.c
+++ b/source/luametatex/source/tex/texconditional.c
@@ -578,6 +578,15 @@ void tex_conditional_if(halfword code, int unless)
case if_zero_int_code:
result = tex_scan_int(0, NULL) == 0;
goto RESULT;
+ case if_interval_int_code:
+ {
+ scaled n0 = tex_scan_int(0, NULL);
+ scaled n1 = tex_scan_int(0, NULL);
+ scaled n2 = tex_scan_int(0, NULL);
+ result = n1 - n2;
+ result = result == 0 ? 1 : (result > 0 ? result <= n0 : -result <= n0);
+ }
+ goto RESULT;
case if_abs_posit_code:
case if_posit_code:
{
@@ -608,6 +617,15 @@ void tex_conditional_if(halfword code, int unless)
case if_zero_posit_code:
result = tex_posit_eq_zero(tex_scan_posit(0));
goto RESULT;
+ case if_interval_posit_code:
+ {
+ halfword n0 = tex_scan_posit(0);
+ halfword n1 = tex_scan_posit(0);
+ halfword n2 = tex_scan_posit(0);
+ result = tex_posit_sub(n1, n2);
+ result = tex_posit_eq_zero(result) ? 1 : (tex_posit_gt_zero(result) ? tex_posit_le(result, n0) : tex_posit_le(tex_posit_neg(result), n0));
+ }
+ goto RESULT;
case if_abs_dim_code:
case if_dim_code:
{
@@ -638,6 +656,15 @@ void tex_conditional_if(halfword code, int unless)
case if_zero_dim_code:
result = tex_scan_dimen(0, 0, 0, 0, NULL) == 0;
goto RESULT;
+ case if_interval_dim_code:
+ {
+ scaled n0 = tex_scan_dimen(0, 0, 0, 0, NULL);
+ scaled n1 = tex_scan_dimen(0, 0, 0, 0, NULL);
+ scaled n2 = tex_scan_dimen(0, 0, 0, 0, NULL);
+ result = n1 - n2;
+ result = result == 0 ? 1 : (result > 0 ? result <= n0 : -result <= n0);
+ }
+ goto RESULT;
case if_odd_code:
result = odd(tex_scan_int(0, NULL));
goto RESULT;
diff --git a/source/luametatex/source/tex/texconditional.h b/source/luametatex/source/tex/texconditional.h
index 41b33dc36..0790d86f9 100644
--- a/source/luametatex/source/tex/texconditional.h
+++ b/source/luametatex/source/tex/texconditional.h
@@ -55,12 +55,15 @@ typedef enum if_test_codes {
if_int_code, /*tex |\ifnum| */
if_abs_int_code, /*tex |\ifabsnum| */
if_zero_int_code, /*tex |\ifzeronum|*/
+ if_interval_int_code,
if_posit_code,
if_abs_posit_code,
if_zero_posit_code,
+ if_interval_posit_code,
if_dim_code, /*tex |\ifdim| */
if_abs_dim_code, /*tex |\ifabsdim| */
if_zero_dim_code, /*tex |\ifzerodim| */
+ if_interval_dim_code,
if_odd_code, /*tex |\ifodd| */
if_vmode_code, /*tex |\ifvmode| */
if_hmode_code, /*tex |\ifhmode| */
diff --git a/source/luametatex/source/tex/texdumpdata.h b/source/luametatex/source/tex/texdumpdata.h
index e21177713..be45c0045 100644
--- a/source/luametatex/source/tex/texdumpdata.h
+++ b/source/luametatex/source/tex/texdumpdata.h
@@ -55,7 +55,7 @@
*/
-# define luametatex_format_fingerprint 689
+# define luametatex_format_fingerprint 690
/* These end up in the string pool. */
diff --git a/source/luametatex/source/tex/texequivalents.c b/source/luametatex/source/tex/texequivalents.c
index c3cbf087d..4f8f789b7 100644
--- a/source/luametatex/source/tex/texequivalents.c
+++ b/source/luametatex/source/tex/texequivalents.c
@@ -1542,9 +1542,7 @@ void tex_unsave(void)
}
tex_local_control(1);
}
-
unsave_attribute_state_before();
-
tex_unsave_math_codes(cur_level);
tex_unsave_cat_codes(cat_code_table_par, cur_level);
tex_unsave_text_codes(cur_level);
diff --git a/source/luametatex/source/tex/texequivalents.h b/source/luametatex/source/tex/texequivalents.h
index a2ea8762d..5ecf73ee0 100644
--- a/source/luametatex/source/tex/texequivalents.h
+++ b/source/luametatex/source/tex/texequivalents.h
@@ -435,6 +435,8 @@ typedef enum int_codes {
post_display_penalty_code, /*tex penalty for breaking just after a displayed formula */
pre_inline_penalty_code, /*tex penalty for breaking just before an inlined formula */
post_inline_penalty_code, /*tex penalty for breaking just after an inlined formula */
+ pre_short_inline_penalty_code, /*tex penalty for breaking just before a single character inlined formula */
+ post_short_inline_penalty_code, /*tex penalty for breaking just after a single character inlined formula */
inter_line_penalty_code, /*tex additional penalty between lines */
double_hyphen_demerits_code, /*tex demerits for double hyphen break */
final_hyphen_demerits_code, /*tex demerits for final hyphen break */
@@ -1050,6 +1052,19 @@ typedef enum tex_alignment_context_codes {
wrapup_pass_alignment_context,
} tex_alignment_context_codes;
+
+typedef enum tex_breaks_context_codes {
+ initialize_show_breaks_context,
+ start_show_breaks_context,
+ list_show_breaks_context,
+ stop_show_breaks_context,
+ collect_show_breaks_context,
+ line_show_breaks_context,
+ delete_show_breaks_context,
+ report_show_breaks_context,
+ wrapup_show_breaks_context,
+} tex_breaks_context_codes;
+
typedef enum tex_page_context_codes {
box_page_context,
end_page_context,
@@ -1392,6 +1407,8 @@ extern void tex_forced_word_define (int g, halfword p, singleword flag, halfword
# define post_display_penalty_par count_parameter(post_display_penalty_code)
# define pre_inline_penalty_par count_parameter(pre_inline_penalty_code)
# define post_inline_penalty_par count_parameter(post_inline_penalty_code)
+# define pre_short_inline_penalty_par count_parameter(pre_short_inline_penalty_code)
+# define post_short_inline_penalty_par count_parameter(post_short_inline_penalty_code)
# define local_interline_penalty_par count_parameter(local_interline_penalty_code)
# define local_broken_penalty_par count_parameter(local_broken_penalty_code)
@@ -1664,6 +1681,7 @@ typedef enum normalize_line_mode_bits {
typedef enum normalize_par_mode_bits {
normalize_par_mode = 0x0001,
flatten_v_leaders_mode = 0x0002, /* used to be 0x200 */
+ limit_prev_graf_mode = 0x0004,
} normalize_par_mode_bits;
# define normalize_line_mode_permitted(a,b) ((a & b) == b)
diff --git a/source/luametatex/source/tex/texexpand.c b/source/luametatex/source/tex/texexpand.c
index 706972bfe..d0de0c9f1 100644
--- a/source/luametatex/source/tex/texexpand.c
+++ b/source/luametatex/source/tex/texexpand.c
@@ -535,14 +535,6 @@ void tex_expand_current_token(void)
}
break;
}
- /*
- case string_cmd:
- {
- halfword head = str_toks(str_lstring(cs_offset_value + cur_chr), NULL);
- begin_inserted_list(head);
- break;
- }
- */
default:
/* Maybe ... or maybe an option */
// if (lmt_expand_state.cs_name_level == 0) {
@@ -636,21 +628,13 @@ static int tex_aux_collect_cs_tokens(halfword *p, int *n)
case spacer_cmd:
case letter_cmd:
case other_char_cmd:
- case active_char_cmd: /* new */
- // cur_tok = token_val(cur_cmd, cur_chr);
- // *p = tex_store_new_token(*p, cur_tok);
+ case active_char_cmd: /* new, here we don't expand */
*p = tex_store_new_token(*p, token_val(cur_cmd, cur_chr));
*n += 1;
break;
/* case comment_cmd: */
/* case invalid_char_cmd: */
- /*
- case string_cmd:
- cur_tok = token_val(cur_cmd, cur_chr);
- *p = store_new_token(*p, cur_tok);
- *n += str_length(cs_offset_value + cur_chr);
- break;
- */
+ /* break; */
case call_cmd:
case tolerant_call_cmd:
if (get_token_reference(cur_chr) == max_token_reference) { // ! get_token_parameters(cur_chr)) {
diff --git a/source/luametatex/source/tex/texinputstack.c b/source/luametatex/source/tex/texinputstack.c
index 7780c17bc..92a9c8a0c 100644
--- a/source/luametatex/source/tex/texinputstack.c
+++ b/source/luametatex/source/tex/texinputstack.c
@@ -184,8 +184,15 @@ static int tex_aux_room_on_parameter_stack(void) /* quite similar to save_stack
void tex_copy_to_parameter_stack(halfword *pstack, int n)
{
if (tex_aux_room_on_parameter_stack()) {
+if (n == 1) {
+ lmt_input_state.parameter_stack[lmt_input_state.parameter_stack_data.ptr++] = pstack[0];
+//} else if (n == 2) {
+// lmt_input_state.parameter_stack[lmt_input_state.parameter_stack_data.ptr++] = pstack[0];
+// lmt_input_state.parameter_stack[lmt_input_state.parameter_stack_data.ptr++] = pstack[1];
+} else {
memcpy(&lmt_input_state.parameter_stack[lmt_input_state.parameter_stack_data.ptr], pstack, n * sizeof(halfword));
lmt_input_state.parameter_stack_data.ptr += n;
+}
}
}
diff --git a/source/luametatex/source/tex/texinserts.c b/source/luametatex/source/tex/texinserts.c
index 15d4b8c53..5a76ebaad 100644
--- a/source/luametatex/source/tex/texinserts.c
+++ b/source/luametatex/source/tex/texinserts.c
@@ -456,7 +456,7 @@ void tex_run_insert(void)
void tex_finish_insert_group(void)
{
- if (! tex_wrapped_up_paragraph(insert_par_context)) {
+ if (! tex_wrapped_up_paragraph(insert_par_context, 0)) {
halfword p, q; /*tex for short-term use */
scaled d; /*tex holds |split_max_depth| in |insert_group| */
halfword f; /*tex holds |floating_penalty| in |insert_group| */
diff --git a/source/luametatex/source/tex/texlinebreak.c b/source/luametatex/source/tex/texlinebreak.c
index af60f1c40..e009d3e50 100644
--- a/source/luametatex/source/tex/texlinebreak.c
+++ b/source/luametatex/source/tex/texlinebreak.c
@@ -53,6 +53,14 @@
understand. (Remark for myself: the lua variant that i use for playing around occasionally is
not in sync with the code here!)
+ I played a bit with prerolling: make a copy, run the par builder, afterwards collect the
+ result in a box that then can be consulted: wd, ht, dp, quality, hyphens, and especially
+ shape fitting (which was the reason, because |\hangafter| assumes lines and esp with math a
+ line is somewhat unpredictable so we get bad fitting). In the end we decided that it was kind
+ of useless because of the unlikely usage scenario. But I might pick up on it. Of course it can
+ be done in \LUA\ but we don't want the associated performance hit (management overhead) and
+ dealing with (progressive) solution oscillating is also an issue.
+
*/
linebreak_state_info lmt_linebreak_state = {
@@ -859,12 +867,14 @@ static halfword tex_aux_clean_up_the_memory(halfword p)
halfword q = node_next(active_head);
while (q != active_head) {
p = node_next(q);
+ // tex_free_node(q, get_node_size(node_type(q))); // less overhead & testing
tex_flush_node(q);
q = p;
}
q = lmt_linebreak_state.passive;
while (q) {
p = node_next(q);
+ // tex_free_node(q, get_node_size(node_type(q))); // less overhead & testing
tex_flush_node(q);
q = p;
}
@@ -907,12 +917,13 @@ inline static void tex_aux_reset_disc_target(halfword adjust_spacing, scaled *ta
inline static void tex_aux_set_target_to_source(halfword adjust_spacing, scaled target[], const scaled source[])
{
+ // memcpy(&target[total_glue_amount], &source[total_glue_amount], font_shrink_amount * sizeof(halfword));
for (int i = total_glue_amount; i <= total_shrink_amount; i++) {
target[i] = source[i];
}
if (adjust_spacing) {
- target[font_shrink_amount] = source[font_shrink_amount];
target[font_stretch_amount] = source[font_stretch_amount];
+ target[font_shrink_amount] = source[font_shrink_amount];
}
}
@@ -1040,7 +1051,7 @@ static void tex_aux_add_to_widths(halfword s, int adjust_spacing, int adjust_spa
while (s) {
switch (node_type(s)) {
case glyph_node:
- widths[total_glue_amount] += tex_glyph_width(s);
+ widths[total_glue_amount] += tex_glyph_width_ex(s); // ex
if (adjust_spacing && ! tex_has_glyph_option(s, glyph_option_no_expansion) && tex_aux_check_expand_pars(adjust_spacing_step, glyph_font(s))) {
lmt_packaging_state.previous_char_ptr = s;
widths[font_stretch_amount] += tex_char_stretch(s);
@@ -1056,7 +1067,7 @@ static void tex_aux_add_to_widths(halfword s, int adjust_spacing, int adjust_spa
break;
case glue_node:
widths[total_glue_amount] += glue_amount(s);
- widths[2 + glue_stretch_order(s)] += glue_stretch(s);
+ widths[total_stretch_amount + glue_stretch_order(s)] += glue_stretch(s);
widths[total_shrink_amount] += glue_shrink(s);
break;
case kern_node:
@@ -1093,7 +1104,7 @@ static void tex_aux_sub_from_widths(halfword s, int adjust_spacing, int adjust_s
/*tex Subtract the width of node |s| from |break_width|; */
switch (node_type(s)) {
case glyph_node:
- widths[total_glue_amount] -= tex_glyph_width(s);
+ widths[total_glue_amount] -= tex_glyph_width_ex(s); // ex
if (adjust_spacing && ! tex_has_glyph_option(s, glyph_option_no_expansion) && tex_aux_check_expand_pars(adjust_spacing_step, glyph_font(s))) {
lmt_packaging_state.previous_char_ptr = s;
widths[font_stretch_amount] -= tex_char_stretch(s);
@@ -1108,9 +1119,9 @@ static void tex_aux_sub_from_widths(halfword s, int adjust_spacing, int adjust_s
widths[total_glue_amount] -= rule_width(s);
break;
case glue_node:
- widths[total_glue_amount] -= glue_amount(s);
- widths[2 + glue_stretch_order(s)] -= glue_stretch(s);
- widths[total_shrink_amount] -= glue_shrink(s);
+ widths[total_glue_amount] -= glue_amount(s);
+ widths[total_stretch_amount + glue_stretch_order(s)] -= glue_stretch(s);
+ widths[total_shrink_amount] -= glue_shrink(s);
break;
case kern_node:
widths[total_glue_amount] -= kern_amount(s);
@@ -1205,7 +1216,7 @@ static void tex_aux_compute_break_width(int break_type, int adjust_spacing, int
case glue_node:
/*tex Subtract glue from |break_width|; */
lmt_linebreak_state.break_width[total_glue_amount] -= glue_amount(s);
- lmt_linebreak_state.break_width[2 + glue_stretch_order(s)] -= glue_stretch(s);
+ lmt_linebreak_state.break_width[total_stretch_amount + glue_stretch_order(s)] -= glue_stretch(s);
lmt_linebreak_state.break_width[total_shrink_amount] -= glue_shrink(s);
break;
case penalty_node:
@@ -1222,7 +1233,7 @@ static void tex_aux_compute_break_width(int break_type, int adjust_spacing, int
lmt_linebreak_state.break_width[total_glue_amount] -= math_surround(s);
} else {
lmt_linebreak_state.break_width[total_glue_amount] -= math_amount(s);
- lmt_linebreak_state.break_width[2 + math_stretch_order(s)] -= math_stretch(s);
+ lmt_linebreak_state.break_width[total_stretch_amount + math_stretch_order(s)] -= math_stretch(s);
lmt_linebreak_state.break_width[total_shrink_amount] -= math_shrink(s);
}
break;
@@ -1233,30 +1244,94 @@ static void tex_aux_compute_break_width(int break_type, int adjust_spacing, int
}
}
-static void tex_aux_print_break_node(halfword q, halfword fit_class, halfword break_type, halfword cur_p, const line_break_properties *properties)
+static void tex_aux_initialize_show_break_node(int callback_id)
+{
+ lmt_run_callback(lmt_lua_state.lua_instance, callback_id, "d->", initialize_show_breaks_context);
+}
+
+static void tex_aux_start_show_break_node(int callback_id, int pass)
+{
+ lmt_run_callback(lmt_lua_state.lua_instance, callback_id, "dd->", start_show_breaks_context, pass);
+}
+
+static void tex_aux_stop_show_break_node(int callback_id)
+{
+ lmt_run_callback(lmt_lua_state.lua_instance, callback_id, "d->", stop_show_breaks_context);
+}
+
+static void tex_aux_collect_show_break_node(int callback_id)
+{
+ lmt_run_callback(lmt_lua_state.lua_instance, callback_id, "d->", collect_show_breaks_context);
+}
+
+static void tex_aux_line_show_break_node(int callback_id)
+{
+ lmt_run_callback(lmt_lua_state.lua_instance, callback_id, "dNdddd->", line_show_breaks_context,
+ lmt_linebreak_state.just_box, lmt_packaging_state.last_badness, lmt_packaging_state.last_overshoot,
+ lmt_packaging_state.total_shrink[normal_glue_order], lmt_packaging_state.total_stretch[normal_glue_order]
+ );
+}
+
+static void tex_aux_delete_break_node(halfword active, halfword passive, int callback_id)
+{
+ (void) active;
+ lmt_run_callback(lmt_lua_state.lua_instance, callback_id, "dd->", delete_show_breaks_context,
+ passive_serial(passive)
+ );
+}
+
+static void tex_aux_wrapup_show_break_node(int callback_id)
+{
+ lmt_run_callback(lmt_lua_state.lua_instance, callback_id, "d->", wrapup_show_breaks_context);
+}
+
+static void tex_aux_show_break_node(halfword active, halfword passive, int callback_id, int pass, halfword *demerits)
+{
+ lmt_run_callback(lmt_lua_state.lua_instance, callback_id, "ddddddddNdd->r", report_show_breaks_context,
+ pass,
+ passive_serial(passive),
+ passive_prev_break(passive) ? passive_serial(passive_prev_break(passive)) : 0,
+ active_line_number(active) - 1,
+ node_type(active),
+ active_fitness(active),
+ active_total_demerits(active), /* demerits */
+ passive_cur_break(passive),
+ lmt_linebreak_state.do_last_line_fit ? active_short(active) : 0,
+ lmt_linebreak_state.do_last_line_fit ? active_glue(active) : 0,
+ demerits /* optionally changed */
+ );
+}
+
+static void tex_aux_list_break_node(halfword passive, int callback_id)
+{
+ lmt_run_callback(lmt_lua_state.lua_instance, callback_id, "dd->", list_show_breaks_context,
+ passive_serial(passive)
+ );
+}
+
+static void tex_aux_print_break_node(halfword active, halfword passive)
{
- (void) properties;
/*tex Print a symbolic description of the new break node. */
tex_print_format(
"%l[break: serial %i, line %i.%i,%s demerits %i, ",
- passive_serial(lmt_linebreak_state.passive),
- active_line_number(q) - 1,
- fit_class,
- break_type == hyphenated_node ? " hyphenated, " : "",
- active_total_demerits(q)
+ passive_serial(passive),
+ active_line_number(active) - 1,
+ active_fitness(active),
+ node_type(active) == hyphenated_node ? " hyphenated, " : "",
+ active_total_demerits(active)
);
if (lmt_linebreak_state.do_last_line_fit) {
/*tex Print additional data in the new active node. */
tex_print_format(
" short %D, %s %D, ",
- active_short(q), pt_unit,
- cur_p ? "glue" : "active",
- active_glue(q), pt_unit
+ active_short(active), pt_unit,
+ passive_cur_break(passive) ? "glue" : "active",
+ active_glue(active), pt_unit
);
}
tex_print_format(
"previous %i]",
- passive_prev_break(lmt_linebreak_state.passive) ? passive_serial(passive_prev_break(lmt_linebreak_state.passive)) : 0
+ passive_prev_break(passive) ? passive_serial(passive_prev_break(passive)) : null
);
}
@@ -1303,9 +1378,6 @@ static void tex_aux_print_feasible_break(halfword cur_p, halfword r, halfword b,
);
}
-# define total_font_stretch cur_active_width[font_stretch_amount]
-# define total_font_shrink cur_active_width[font_shrink_amount]
-
/*tex We implement this one later on. */
/*
@@ -1313,7 +1385,7 @@ static void tex_aux_print_feasible_break(halfword cur_p, halfword r, halfword b,
trickery depending on it.
*/
-static void tex_aux_post_line_break(const line_break_properties *properties, halfword line_break_dir);
+static void tex_aux_post_line_break(const line_break_properties *properties, halfword line_break_dir, int callback_id);
/*tex
@@ -1363,28 +1435,52 @@ static void tex_aux_post_line_break(const line_break_properties *properties, hal
good estimates from Don Knuth here, it would be pretentious to suggest that I really did
research this fuzzy topic (if it was worth the effort at all).
+ Here |large_width_excess| is 110.32996pt while |small_stretchability| equals 25.38295pt.
+
+*/
+
+/*tex
+ Around 2023-05-24 Mikael Sundqvist and I did numerous tests with the badness function below in
+ comparison with the variant mentioned in Digital Typography (DEK) and we observed that indeed
+ both functions behave pretty close (emulations with lua, mathematica etc). In practice one can
+ get different badness values (especially low numbers). We ran some test on documents and on
+ hundreds of pages one can get a few different decisions. The main reason for looking into this
+ was that we were exploring a bit more visual approach to deciding on what penalties to use in
+ the math inter-atom spacing in \CONTEXT\ (where we use a more granular class model). In the end
+ the magic criteria became even more magic (and impressive). BTW, indeed we could get these 1095
+ different badness cases with as maximum calculated one 8189.
*/
halfword tex_badness(scaled t, scaled s)
{
- /*tex Approximation to $\alpha t/s$, where $\alpha^3\approx 100\cdot2^{18}$ */
+ /*tex Approximation to $\alpha t/s$, where $\alpha^3 \approx 100 \cdot 2^{18}$ */
if (t == 0) {
return 0;
} else if (s <= 0) {
return infinite_bad;
} else {
- /*tex $297^3=99.94\times2^{18}$ */
- if (t <= large_width_excess) {
- t = (t * 297) / s;
- } else if (s >= small_stretchability) {
- t = t / (s / 297);
- }
- if (t > 1290) {
- /*tex $1290^3<2^{31}<1291^3$ */
+ /*tex $297^3 = 99.94 \times 2^{18}$ */
+ if (t <= large_width_excess) {
+ t = (t * 297) / s; /* clipping by integer division */
+ } else if (s >= small_stretchability) {
+ t = t / (s / 297); /* clipping by integer division */
+ } else {
+ /*tex
+ When we end up here |t| is pretty large so we can as well save a test and return
+ immediately. (HH & MS: we tested this while cheating a bit because this function
+ is seldom entered with values that make us end up here.)
+ */
+ return infinite_bad;
+ }
+ if (t > 1290) {
+ /*tex As $1290^3 < 2^{31} < 1291^3$ we catch an overflow here. */ /* actually badness 8189 */
return infinite_bad;
} else {
- /*tex This is $t^3/2^{18}$, rounded to the nearest integer. */
- return ((t * t * t + 0400000) / 01000000);
+ /*tex 297*297*297 == 26198073 / 100 => 261981 */
+ /*tex This is $t^3 / 2^{18}$, rounded to the nearest integer */
+ return (t * t * t + 0400000) / 01000000; /* 0400000/01000000 == 1/2 */
+ // return (t * t * t + 0x20000) / 0x40000;
+ // return (t * t * t + 131072) / 262144;
}
}
}
@@ -1410,13 +1506,6 @@ inline static void tex_split_line_break_criterium(halfword criterium, halfword *
inline static halfword tex_normalized_loose_badness(halfword b, halfword loose, halfword semi_loose, halfword decent)
{
- // if (b > loose_criterium) {
- // return very_loose_fit;
- // } else if (b > decent_criterium) {
- // return loose_fit;
- // } else {
- // return decent_fit;
- // }
if (b > loose) {
return very_loose_fit;
} else if (b > semi_loose) {
@@ -1430,11 +1519,6 @@ inline static halfword tex_normalized_loose_badness(halfword b, halfword loose,
inline static halfword tex_normalized_tight_badness(halfword b, halfword decent, halfword semi_tight)
{
- // if (b > decent_criterium) {
- // return tight_fit;
- // } else {
- // return decent_fit;
- // }
if (b > semi_tight) {
return semi_tight_fit;
} else if (b > decent) {
@@ -1444,78 +1528,134 @@ inline static halfword tex_normalized_tight_badness(halfword b, halfword decent,
}
}
+static void tex_check_protrusion_shortfall(halfword r, halfword first_p, halfword cur_p, halfword *shortfall)
+{
+ // if (line_break_dir == dir_righttoleft) {
+ // /*tex Not now, we need to keep more track. */
+ // } else {
+ halfword o = null;
+ halfword l = active_break_node(r) ? passive_cur_break(active_break_node(r)) : first_p;
+ if (cur_p) {
+ o = node_prev(cur_p);
+ if (node_next(o) != cur_p) {
+ tex_normal_error("linebreak", "the node list is messed up");
+ }
+ }
+ /*tex
+
+        The last characters (hyphenation character) of these two lists should always be
+ the same anyway, so we just look at |pre_break|. Let's look at the right margin
+ first.
+
+ */
+ if (cur_p && node_type(cur_p) == disc_node && disc_pre_break_head(cur_p)) {
+ /*tex
+ A |disc_node| with non-empty |pre_break|, protrude the last char of
+ |pre_break|:
+ */
+ o = disc_pre_break_tail(cur_p);
+ } else {
+ o = tex_aux_find_protchar_right(l, o);
+ }
+ if (o && node_type(o) == glyph_node) {
+        *shortfall += tex_char_protrusion(o, right_margin_kern_subtype);
+ // char_pw_kern(o, right_margin_kern, &margin_kern_stretch, &margin_kern_shrink);
+ }
+ /*tex now the left margin */
+ if (l && (node_type(l) == disc_node) && (disc_post_break_head(l))) {
+ /*tex The first char could be a disc! Protrude the first char. */
+ o = disc_post_break_head(l);
+ } else {
+ o = tex_aux_find_protchar_left(l, 1);
+ }
+ if (o && node_type(o) == glyph_node) {
+        *shortfall += tex_char_protrusion(o, left_margin_kern_subtype);
+ // char_pw_kern(o, left_margin_kern, &margin_kern_stretch, &margin_kern_shrink);
+ }
+ // }
+}
+
static void tex_aux_try_break(
const line_break_properties *properties,
- halfword pi, /* a penalty */
+ halfword penalty,
halfword break_type,
halfword first_p,
- halfword cur_p
+ halfword cur_p,
+ int callback_id,
+ int pass
)
{
- /*tex runs through the active list */
- halfword r;
/*tex stays a step behind |r| */
halfword prev_r = active_head;
/*tex a step behind |prev_r|, if |type(prev_r) = delta_node| */
halfword prev_prev_r = null;
+ /*tex distance from current active node */
+ scaled cur_active_width[n_of_glue_amounts] = { 0 };
+ /*tex
+ These status arrays are global to the main loop and will be initialized as we go.
+ */
+ halfword best_place[n_of_finess_values];
+ halfword best_place_line[n_of_finess_values];
+ scaled best_place_short[n_of_finess_values];
+ scaled best_place_glue[n_of_finess_values];
+ /*
+ These are more local but we keep them here because of readability.
+ */
+ /*tex badness of test line */
+ halfword badness = 0;
+ /*tex demerits of test line */
+ int demerits = 0;
+ /*tex glue stretch or shrink of test line, adjustment for last line */
+ scaled glue = 0;
+ /*tex used in badness calculations */
+ scaled shortfall = 0;
/*tex maximum line number in current equivalence class of lines */
- halfword old_l = 0;
+ halfword old_line = 0;
/*tex have we found a feasible break at |cur_p|? */
int no_break_yet = 1;
- /*tex line number of current active node */
- halfword l;
/*tex should node |r| remain in the active list? */
int node_r_stays_active;
- /*tex the current line will be justified to this width */
- scaled line_width = 0;
/*tex possible fitness class of test line */
halfword fit_class;
- /*tex badness of test line */
- halfword b;
- /*tex demerits of test line */
- int d;
/*tex has |d| been forced to zero? */
int artificial_demerits;
- /*tex used in badness calculations */
- scaled shortfall = 0;
- /*tex glue stretch or shrink of test line, adjustment for last line */
- scaled g = 0;
- /*tex distance from current active node */
- scaled cur_active_width[10] = { 0 };
- halfword best_place[n_of_finess_values];
- halfword best_place_line[n_of_finess_values];
- scaled best_place_short[n_of_finess_values];
- scaled best_place_glue[n_of_finess_values];
- /*tex Experiment */
+ /*tex the current line will be justified to this width */
+ scaled line_width = 0;
+ /*tex line number of current active node */
+ halfword line = 0;
+ /*tex
+        We have added an extra category, just as an experiment. In practice there is very little
+ to gain here as it becomes kind of fuzzy and DEK values are quite okay.
+ */
halfword semi_tight, decent, semi_loose, loose;
- /* in par node */
+ /*tex in par node */
tex_split_line_break_criterium(line_break_criterium_par, &semi_tight, &decent, &semi_loose, &loose);
/*tex Make sure that |pi| is in the proper range; */
- if (pi >= infinite_penalty) {
+ if (penalty >= infinite_penalty) {
/*tex this breakpoint is inhibited by infinite penalty */
return;
- } else if (pi <= -infinite_penalty) {
+ } else if (penalty <= -infinite_penalty) {
/*tex this breakpoint will be forced */
- pi = eject_penalty;
+ penalty = eject_penalty;
}
tex_aux_set_target_to_source(properties->adjust_spacing, cur_active_width, lmt_linebreak_state.active_width);
while (1) {
- r = node_next(prev_r);
+ /*tex Here |r| runs through the active list: */
+ halfword r = node_next(prev_r);
/*tex
If node |r| is of type |delta_node|, update |cur_active_width|, set |prev_r| and
|prev_prev_r|, then |goto continue|. The following code uses the fact that |type
(active) <> delta_node|.
- Here we get: |unhyphenated_node|, |hyphenated_node, |delta_node|, |passive_node|
-
*/
if (node_type(r) == delta_node) {
- /*tex implicit */
tex_aux_add_to_target_from_delta(properties->adjust_spacing, cur_active_width, r);
prev_prev_r = prev_r;
prev_r = r;
continue;
+ } else {
+ /*tex We have an |unhyphenated_node| or |hyphenated_node|. */
}
/*tex
@@ -1528,10 +1668,10 @@ static void tex_aux_try_break(
that |r = active| and |line_number (active) > old_l|.
*/
- l = active_line_number(r);
- if (l > old_l) {
- /*tex now we are no longer in the inner loop */
- if ((lmt_linebreak_state.minimum_demerits < awful_bad) && ((old_l != lmt_linebreak_state.easy_line) || (r == active_head))) {
+ line = active_line_number(r);
+ if (line > old_line) {
+ /*tex Now we are no longer in the inner loop (well ...). */
+ if ((lmt_linebreak_state.minimum_demerits < awful_bad) && ((old_line != lmt_linebreak_state.easy_line) || (r == active_head))) {
/*tex
Create new active nodes for the best feasible breaks just found. It is not
@@ -1578,53 +1718,55 @@ static void tex_aux_try_break(
Insert a new active node from |best_place [fit_class]| to |cur_p|. When
we create an active node, we also create the corresponding passive node.
+ In the passive node we also keep track of the subparagraph penalties.
*/
- halfword q = tex_new_node(passive_node, (quarterword) very_loose_fit);
- node_next(q) = lmt_linebreak_state.passive;
- lmt_linebreak_state.passive = q;
- passive_cur_break(q) = cur_p;
- ++lmt_linebreak_state.pass_number;
- passive_serial(q) = lmt_linebreak_state.pass_number;
- passive_prev_break(q) = best_place[fit_class];
- /*tex
-
- Here we keep track of the subparagraph penalties in the break nodes.
-
- */
- passive_pen_inter(q) = lmt_linebreak_state.internal_penalty_interline;
- passive_pen_broken(q) = lmt_linebreak_state.internal_penalty_broken;
- passive_last_left_box(q) = lmt_linebreak_state.internal_left_box;
- passive_last_left_box_width(q) = lmt_linebreak_state.internal_left_box_width;
- if (passive_prev_break(q)) {
- passive_left_box(q) = passive_last_left_box(passive_prev_break(q));
- passive_left_box_width(q) = passive_last_left_box_width(passive_prev_break(q));
+ halfword passive = tex_new_node(passive_node, (quarterword) very_loose_fit);
+ halfword active = tex_new_node((quarterword) break_type, (quarterword) fit_class);
+ halfword prev_break = best_place[fit_class];
+ /*tex Initialize the passive node: */
+ passive_cur_break(passive) = cur_p;
+ passive_serial(passive) = ++lmt_linebreak_state.pass_number;
+ passive_prev_break(passive) = prev_break;
+ passive_pen_inter(passive) = lmt_linebreak_state.internal_penalty_interline;
+ passive_pen_broken(passive) = lmt_linebreak_state.internal_penalty_broken;
+ passive_last_left_box(passive) = lmt_linebreak_state.internal_left_box;
+ passive_last_left_box_width(passive) = lmt_linebreak_state.internal_left_box_width;
+ if (prev_break) {
+ passive_left_box(passive) = passive_last_left_box(prev_break);
+ passive_left_box_width(passive) = passive_last_left_box_width(prev_break);
} else {
- passive_left_box(q) = lmt_linebreak_state.init_internal_left_box;
- passive_left_box_width(q) = lmt_linebreak_state.init_internal_left_box_width;
+ passive_left_box(passive) = lmt_linebreak_state.init_internal_left_box;
+ passive_left_box_width(passive) = lmt_linebreak_state.init_internal_left_box_width;
}
- passive_right_box(q) = lmt_linebreak_state.internal_right_box;
- passive_right_box_width(q) = lmt_linebreak_state.internal_right_box_width;
- passive_middle_box(q) = lmt_linebreak_state.internal_middle_box;
- q = tex_new_node((quarterword) break_type, (quarterword) fit_class);
- active_break_node(q) = lmt_linebreak_state.passive;
- active_line_number(q) = best_place_line[fit_class] + 1;
- active_total_demerits(q) = lmt_linebreak_state.minimal_demerits[fit_class];
+ passive_right_box(passive) = lmt_linebreak_state.internal_right_box;
+ passive_right_box_width(passive) = lmt_linebreak_state.internal_right_box_width;
+ passive_middle_box(passive) = lmt_linebreak_state.internal_middle_box;
+ /*tex Initialize the active node: */
+ active_break_node(active) = passive;
+ active_line_number(active) = best_place_line[fit_class] + 1;
+ active_total_demerits(active) = lmt_linebreak_state.minimal_demerits[fit_class];
+ // active_reserved(active) = lmt_linebreak_state.pass_number;
if (lmt_linebreak_state.do_last_line_fit) {
- /*tex
-
- Store additional data in the new active node. Here we save these
- data in the active node representing a potential line break.
-
- */
- active_short(q) = best_place_short[fit_class];
- active_glue(q) = best_place_glue[fit_class];
+ /*tex Store additional data in the new active node. */
+ active_short(active) = best_place_short[fit_class];
+ active_glue(active) = best_place_glue[fit_class];
+ }
+ /*tex Append the passive node. */
+ node_next(passive) = lmt_linebreak_state.passive;
+ lmt_linebreak_state.passive = passive;
+ /*tex Append the active node. */
+ node_next(active) = r;
+ node_next(prev_r) = active;
+ prev_r = active;
+ /* */
+ if (callback_id) {
+ halfword demerits = active_total_demerits(active);
+ tex_aux_show_break_node(active, passive, callback_id, pass, &demerits);
+ active_total_demerits(active) = demerits;
}
- node_next(q) = r;
- node_next(prev_r) = q;
- prev_r = q;
if (properties->tracing_paragraphs > 0) {
- tex_aux_print_break_node(q, fit_class, break_type, cur_p, properties);
+ tex_aux_print_break_node(active, passive);
}
}
lmt_linebreak_state.minimal_demerits[fit_class] = awful_bad;
@@ -1638,12 +1780,12 @@ static void tex_aux_try_break(
*/
if (r != active_head) {
- halfword q = tex_new_node(delta_node, (quarterword) very_loose_fit);
- node_next(q) = r;
- tex_aux_set_delta_from_difference(properties->adjust_spacing, q, cur_active_width, lmt_linebreak_state.break_width);
- node_next(prev_r) = q;
+ halfword delta = tex_new_node(delta_node, (quarterword) very_loose_fit);
+ node_next(delta) = r;
+ tex_aux_set_delta_from_difference(properties->adjust_spacing, delta, cur_active_width, lmt_linebreak_state.break_width);
+ node_next(prev_r) = delta;
prev_prev_r = prev_r;
- prev_r = q;
+ prev_r = delta;
}
}
/*tex
@@ -1657,17 +1799,17 @@ static void tex_aux_try_break(
*/
if (r == active_head) {
return;
- } else if (l > lmt_linebreak_state.easy_line) {
- old_l = max_halfword - 1;
+ } else if (line > lmt_linebreak_state.easy_line) {
+ old_line = max_halfword - 1;
line_width = lmt_linebreak_state.second_width;
} else {
- old_l = l;
+ old_line = line;
/* if (properties->par_shape && specification_repeat(properties->par_shape)) {
line_width = get_specification_width(properties->par_shape, l);
- } else */ if (l > lmt_linebreak_state.last_special_line) {
+ } else */ if (line > lmt_linebreak_state.last_special_line) {
line_width = lmt_linebreak_state.second_width;
} else if (properties->par_shape) {
- line_width = tex_get_specification_width(properties->par_shape, l);
+ line_width = tex_get_specification_width(properties->par_shape, line);
} else {
line_width = lmt_linebreak_state.first_width;
}
@@ -1694,49 +1836,7 @@ static void tex_aux_try_break(
// halfword margin_kern_stretch = 0;
// halfword margin_kern_shrink = 0;
if (properties->protrude_chars) {
- // if (line_break_dir == dir_righttoleft) {
- // /*tex Not now, we need to keep more track. */
- // } else {
- halfword o = null;
- halfword l1 = active_break_node(r) ? passive_cur_break(active_break_node(r)) : first_p;
- if (cur_p) {
- o = node_prev(cur_p);
- if (node_next(o) != cur_p) {
- tex_normal_error("linebreak", "the node list is messed up");
- }
- }
- /*tex
-
- The last characters (hyphenation character) if these two list should always be
- the same anyway, so we just look at |pre_break|. Let's look at the right margin
- first.
-
- */
- if (cur_p && node_type(cur_p) == disc_node && disc_pre_break_head(cur_p)) {
- /*tex
- A |disc_node| with non-empty |pre_break|, protrude the last char of
- |pre_break|:
- */
- o = disc_pre_break_tail(cur_p);
- } else {
- o = tex_aux_find_protchar_right(l1, o);
- }
- if (o && node_type(o) == glyph_node) {
- shortfall += tex_char_protrusion(o, right_margin_kern_subtype);
- // char_pw_kern(o, right_margin_kern, &margin_kern_stretch, &margin_kern_shrink);
- }
- /*tex now the left margin */
- if (l1 && (node_type(l1) == disc_node) && (disc_post_break_head(l1))) {
- /*tex The first char could be a disc! Protrude the first char. */
- o = disc_post_break_head(l1);
- } else {
- o = tex_aux_find_protchar_left(l1, 1);
- }
- if (o && node_type(o) == glyph_node) {
- shortfall += tex_char_protrusion(o, left_margin_kern_subtype);
- // char_pw_kern(o, left_margin_kern, &margin_kern_stretch, &margin_kern_shrink);
- }
- // }
+ tex_check_protrusion_shortfall(r, first_p, cur_p, &shortfall);
}
/*tex
The only reason why we have a shared ratio is that we need to calculate the shortfall
@@ -1745,8 +1845,8 @@ static void tex_aux_try_break(
if (shortfall == 0) {
/*tex We're okay. */
} else if (shortfall > 0) {
- halfword total_stretch = total_font_stretch;
- // halfword total_stretch = total_font_stretch + margin_kern_stretch;
+ halfword total_stretch = cur_active_width[font_stretch_amount];
+ // halfword total_stretch = cur_active_width[font_stretch_amount] + margin_kern_stretch;
if (total_stretch > 0) {
if (total_stretch > shortfall) {
shortfall = (total_stretch / (lmt_linebreak_state.max_stretch_ratio / lmt_linebreak_state.current_font_step)) / 2;
@@ -1755,11 +1855,11 @@ static void tex_aux_try_break(
}
}
} else if (shortfall < 0) {
- halfword total_shrink = total_font_shrink;
- // halfword total_shrink = total_font_shrink + margin_kern_shrink;
+ halfword total_shrink = cur_active_width[font_shrink_amount];
+ // halfword total_shrink = cur_active_width[font_shrink_amount] + margin_kern_shrink;
if (total_shrink > 0) {
if (total_shrink > -shortfall) {
- shortfall = - (total_shrink / (lmt_linebreak_state.max_shrink_ratio / lmt_linebreak_state.current_font_step)) / 2;
+ shortfall = - (total_shrink / (lmt_linebreak_state.max_shrink_ratio / lmt_linebreak_state.current_font_step)) / 2;
} else {
shortfall += total_shrink;
}
@@ -1809,23 +1909,23 @@ static void tex_aux_try_break(
goto NOT_FOUND;
}
if (active_short(r) > 0) {
- g = cur_active_width[total_stretch_amount];
+ glue = cur_active_width[total_stretch_amount];
} else {
- g = cur_active_width[total_shrink_amount];
+ glue = cur_active_width[total_shrink_amount];
}
- if (g <= 0) {
+ if (glue <= 0) {
/*tex No finite stretch resp.\ no shrink. */
goto NOT_FOUND;
}
lmt_scanner_state.arithmic_error = 0;
- g = tex_fract(g, active_short(r), active_glue(r), max_dimen);
+ glue = tex_fract(glue, active_short(r), active_glue(r), max_dimen);
if (properties->last_line_fit < 1000) {
- g = tex_fract(g, properties->last_line_fit, 1000, max_dimen);
+ glue = tex_fract(glue, properties->last_line_fit, 1000, max_dimen);
}
if (lmt_scanner_state.arithmic_error) {
- g = (active_short(r) > 0) ? max_dimen : -max_dimen;
+ glue = (active_short(r) > 0) ? max_dimen : -max_dimen;
}
- if (g > 0) {
+ if (glue > 0) {
/*tex
Set the value of |b| to the badness of the last line for stretching,
@@ -1834,44 +1934,44 @@ static void tex_aux_try_break(
algorithm, with the adjustment amount |g| replacing the |shortfall|.
*/
- if (g > shortfall) {
- g = shortfall;
+ if (glue > shortfall) {
+ glue = shortfall;
}
- if (g > large_width_excess && (cur_active_width[total_stretch_amount] < small_stretchability)) {
- b = infinite_bad;
+ if (glue > large_width_excess && (cur_active_width[total_stretch_amount] < small_stretchability)) {
+ badness = infinite_bad;
fit_class = very_loose_fit;
- goto FOUND;
+ } else {
+ badness = tex_badness(glue, cur_active_width[total_stretch_amount]);
+ fit_class = tex_normalized_loose_badness(badness, loose, semi_loose, decent);
}
- b = tex_badness(g, cur_active_width[total_stretch_amount]);
- fit_class = tex_normalized_loose_badness(b, loose, semi_loose, decent);
goto FOUND;
- } else if (g < 0) {
+ } else if (glue < 0) {
/*tex
Set the value of |b| to the badness of the last line for shrinking,
compute the corresponding |fit_class, and |goto found||.
*/
- if (-g > cur_active_width[total_shrink_amount]) {
- g = -cur_active_width[total_shrink_amount];
+ if (-glue > cur_active_width[total_shrink_amount]) {
+ glue = -cur_active_width[total_shrink_amount];
}
- b = tex_badness(-g, cur_active_width[total_shrink_amount]);
- fit_class = tex_normalized_tight_badness(b, decent, semi_tight);
+ badness = tex_badness(-glue, cur_active_width[total_shrink_amount]);
+ fit_class = tex_normalized_tight_badness(badness, decent, semi_tight);
goto FOUND;
}
}
NOT_FOUND:
shortfall = 0;
}
- b = 0;
+ badness = 0;
/*tex Infinite stretch. */
fit_class = decent_fit;
} else if (shortfall > large_width_excess && cur_active_width[total_stretch_amount] < small_stretchability) {
- b = infinite_bad;
+ badness = infinite_bad;
fit_class = very_loose_fit;
} else {
- b = tex_badness(shortfall, cur_active_width[total_stretch_amount]);
- fit_class = tex_normalized_loose_badness(b, loose, semi_loose, decent);
+ badness = tex_badness(shortfall, cur_active_width[total_stretch_amount]);
+ fit_class = tex_normalized_loose_badness(badness, loose, semi_loose, decent);
}
} else {
/*tex
@@ -1883,27 +1983,27 @@ static void tex_aux_try_break(
*/
if (-shortfall > cur_active_width[total_shrink_amount]) {
- b = infinite_bad + 1;
+ badness = infinite_bad + 1;
} else {
- b = tex_badness(-shortfall, cur_active_width[total_shrink_amount]);
+ badness = tex_badness(-shortfall, cur_active_width[total_shrink_amount]);
}
- fit_class = tex_normalized_tight_badness(b, decent, semi_tight);
+ fit_class = tex_normalized_tight_badness(badness, decent, semi_tight);
}
if (lmt_linebreak_state.do_last_line_fit) {
/*tex Adjust the additional data for last line; */
if (! cur_p) {
shortfall = 0;
- g = 0;
+ glue = 0;
} else if (shortfall > 0) {
- g = cur_active_width[total_stretch_amount];
+ glue = cur_active_width[total_stretch_amount];
} else if (shortfall < 0) {
- g = cur_active_width[total_shrink_amount];
+ glue = cur_active_width[total_shrink_amount];
} else {
- g = 0;
+ glue = 0;
}
}
FOUND:
- if ((b > infinite_bad) || (pi == eject_penalty)) {
+ if ((badness > infinite_bad) || (penalty == eject_penalty)) {
/*tex
Prepare to deactivate node~|r|, and |goto deactivate| unless there is a reason to
@@ -1916,17 +2016,16 @@ static void tex_aux_try_break(
changes here.
*/
- if (lmt_linebreak_state.final_pass && (lmt_linebreak_state.minimum_demerits == awful_bad) &&
- (node_next(r) == active_head) && (prev_r == active_head)) {
+ if (lmt_linebreak_state.final_pass && (lmt_linebreak_state.minimum_demerits == awful_bad) && (node_next(r) == active_head) && (prev_r == active_head)) {
/*tex Set demerits zero, this break is forced. */
artificial_demerits = 1;
- } else if (b > lmt_linebreak_state.threshold) {
+ } else if (badness > lmt_linebreak_state.threshold) {
goto DEACTIVATE;
}
node_r_stays_active = 0;
} else {
prev_r = r;
- if (b > lmt_linebreak_state.threshold) {
+ if (badness > lmt_linebreak_state.threshold) {
continue;
} else {
node_r_stays_active = 1;
@@ -1942,27 +2041,27 @@ static void tex_aux_try_break(
*/
if (artificial_demerits) {
- d = 0;
+ demerits = 0;
} else {
/*tex Compute the demerits, |d|, from |r| to |cur_p|. */
- d = properties->line_penalty + b;
- if (abs(d) >= 10000) {
- d = 100000000;
+ demerits = properties->line_penalty + badness;
+ if (abs(demerits) >= 10000) {
+ demerits = 100000000;
} else {
- d = d * d;
+ demerits = demerits * demerits;
}
- if (pi != 0) {
- if (pi > 0) {
- d += (pi * pi);
- } else if (pi > eject_penalty) {
- d -= (pi * pi);
+ if (penalty != 0) {
+ if (penalty > 0) {
+ demerits += (penalty * penalty);
+ } else if (penalty > eject_penalty) {
+ demerits -= (penalty * penalty);
}
}
if (break_type == hyphenated_node && node_type(r) == hyphenated_node) {
if (cur_p) {
- d += properties->double_hyphen_demerits;
+ demerits += properties->double_hyphen_demerits;
} else {
- d += properties->final_hyphen_demerits;
+ demerits += properties->final_hyphen_demerits;
}
}
/*tex
@@ -1972,18 +2071,18 @@ static void tex_aux_try_break(
used.
*/
if (abs(fit_class - (halfword) active_fitness(r)) > 1) {
- d = d + properties->adj_demerits;
+ demerits = demerits + properties->adj_demerits;
}
}
if (properties->tracing_paragraphs > 0) {
- tex_aux_print_feasible_break(cur_p, r, b, pi, d, artificial_demerits, properties);
+ tex_aux_print_feasible_break(cur_p, r, badness, penalty, demerits, artificial_demerits, properties);
}
/*tex This is the minimum total demerits from the beginning to |cur_p| via |r|. */
- d += active_total_demerits(r);
- if (d <= lmt_linebreak_state.minimal_demerits[fit_class]) {
- lmt_linebreak_state.minimal_demerits[fit_class] = d;
+ demerits += active_total_demerits(r);
+ if (demerits <= lmt_linebreak_state.minimal_demerits[fit_class]) {
+ lmt_linebreak_state.minimal_demerits[fit_class] = demerits;
best_place[fit_class] = active_break_node(r);
- best_place_line[fit_class] = l;
+ best_place_line[fit_class] = line;
if (lmt_linebreak_state.do_last_line_fit) {
/*tex
@@ -1992,10 +2091,10 @@ static void tex_aux_try_break(
*/
best_place_short[fit_class] = shortfall;
- best_place_glue[fit_class] = g;
+ best_place_glue[fit_class] = glue;
}
- if (d < lmt_linebreak_state.minimum_demerits) {
- lmt_linebreak_state.minimum_demerits = d;
+ if (demerits < lmt_linebreak_state.minimum_demerits) {
+ lmt_linebreak_state.minimum_demerits = demerits;
}
}
/*tex Record a new feasible break. */
@@ -2013,6 +2112,9 @@ static void tex_aux_try_break(
*/
node_next(prev_r) = node_next(r);
+ if (callback_id) {
+ tex_aux_delete_break_node(r, active_break_node(r), callback_id);
+ }
tex_flush_node(r);
if (prev_r == active_head) {
/*tex
@@ -2054,9 +2156,10 @@ static halfword tex_aux_inject_orphan_penalty(halfword current, halfword amount)
halfword penalty = tex_new_penalty_node(amount, orphan_penalty_subtype);
tex_couple_nodes(previous, penalty);
tex_couple_nodes(penalty, current);
- current = previous;
+ return previous;
+ } else {
+ return current;
}
- return current;
}
inline static int tex_aux_valid_glue_break(halfword p)
@@ -2065,13 +2168,47 @@ inline static int tex_aux_valid_glue_break(halfword p)
return (prv && prv != temp_head && (node_type(prv) == glyph_node || precedes_break(prv) || precedes_kern(prv) || precedes_dir(prv)));
}
+inline static halfword tex_aux_upcoming_penalty(halfword p) {
+ halfword n = node_next(p);
+ return (n && node_type(n) == math_node && node_subtype(n) == begin_inline_math) ? math_penalty(n) : 0;
+}
+
+/*tex
+
+ I played a bit with a height driven hanging indentation. One can store |cur_p| in the active
+ node and progressively calculate the height + depth and then act on that but in the end
+    interline space, adjustments etc. also have to be taken into account and that all happens later
+    so in the end it makes no sense. There are valid reasons why \TEX\ can't do some things
+    reliably: user demands are unpredictable.
+
+*/
+
+/*tex
+
+ Here we pickup the line number from |prev_graf| which relates to display math inside a
+ paragraph. A display formula is then considered to span three lines. Of course this also
+ assume a constant baseline distance with lines heigths not exceeding that amount. It also
+ assumes that the shape and hang are not reset. We check the prevgraf for a large value
+ because when we're close to |max_integer| we can wrap around due to addition beyond that
+    and negative values have side effects (see musings-sideffects) but it's optional so that we
+ can actually use these side effects.
+
+*/
+
+# define max_prev_graf (max_integer/2)
+
void tex_do_line_break(line_break_properties *properties)
{
/*tex Miscellaneous nodes of temporary interest. */
- halfword cur_p, l, r;
int line_break_dir = properties->paragraph_dir;
+ int callback_id = lmt_callback_defined(show_break_callback);
int force_check_hyphenation = hyphenation_permitted(properties->hyphenation_mode, force_check_hyphenation_mode);
(void) (properties->inter_line_penalties); /* avoid not used message */
+ /*tex Fix a buglet that probably is a feature. */
+ if ((cur_list.prev_graf > max_prev_graf || cur_list.prev_graf < 0) && normalize_par_mode_permitted(normalize_par_mode_par, limit_prev_graf_mode)) {
+ tex_formatted_warning("tex", "clipping prev_graf %i to %i", cur_list.prev_graf, max_prev_graf);
+ cur_list.prev_graf = max_prev_graf;
+ }
/*tex Get ready to start */
lmt_linebreak_state.fewest_demerits = 0;
lmt_linebreak_state.actual_looseness = 0;
@@ -2228,17 +2365,19 @@ void tex_do_line_break(line_break_properties *properties)
lmt_linebreak_state.easy_line = max_halfword;
}
lmt_linebreak_state.no_shrink_error_yet = 1;
- l = properties->left_skip;
- r = properties->right_skip;
- lmt_linebreak_state.background[total_glue_amount] = glue_amount(l) + glue_amount(r);
- lmt_linebreak_state.background[total_stretch_amount] = 0;
- lmt_linebreak_state.background[total_fi_amount] = 0;
- lmt_linebreak_state.background[total_fil_amount] = 0;
- lmt_linebreak_state.background[total_fill_amount] = 0;
- lmt_linebreak_state.background[total_filll_amount] = 0;
- lmt_linebreak_state.background[total_stretch_amount + glue_stretch_order(l)] = glue_stretch(l);
- lmt_linebreak_state.background[total_stretch_amount + glue_stretch_order(r)] += glue_stretch(r);
- lmt_linebreak_state.background[total_shrink_amount] = tex_aux_checked_shrink(l) + tex_aux_checked_shrink(r);
+ {
+ halfword l = properties->left_skip;
+ halfword r = properties->right_skip;
+ lmt_linebreak_state.background[total_glue_amount] = glue_amount(l) + glue_amount(r);
+ lmt_linebreak_state.background[total_stretch_amount] = 0;
+ lmt_linebreak_state.background[total_fi_amount] = 0;
+ lmt_linebreak_state.background[total_fil_amount] = 0;
+ lmt_linebreak_state.background[total_fill_amount] = 0;
+ lmt_linebreak_state.background[total_filll_amount] = 0;
+ lmt_linebreak_state.background[total_stretch_amount + glue_stretch_order(l)] = glue_stretch(l);
+ lmt_linebreak_state.background[total_stretch_amount + glue_stretch_order(r)] += glue_stretch(r);
+ lmt_linebreak_state.background[total_shrink_amount] = tex_aux_checked_shrink(l) + tex_aux_checked_shrink(r);
+ }
if (properties->adjust_spacing) {
lmt_linebreak_state.background[font_stretch_amount] = 0;
lmt_linebreak_state.background[font_shrink_amount] = 0;
@@ -2283,7 +2422,6 @@ void tex_do_line_break(line_break_properties *properties)
tex_short_display(node_next(temp_head));
tex_end_diagnostic();
}
-
if (lmt_linebreak_state.threshold >= 0) {
if (properties->tracing_paragraphs > 0) {
tex_begin_diagnostic();
@@ -2299,354 +2437,373 @@ void tex_do_line_break(line_break_properties *properties)
tex_begin_diagnostic();
}
}
- while (1) {
- halfword first_p, q;
- halfword nest_stack[10];
- int nest_index = 0;
- if (lmt_linebreak_state.threshold > infinite_bad) {
- lmt_linebreak_state.threshold = infinite_bad;
- }
- /*tex Create an active breakpoint representing the beginning of the paragraph. */
- q = tex_new_node(unhyphenated_node, (quarterword) decent_fit);
- node_next(q) = active_head;
- active_break_node(q) = null;
- active_line_number(q) = cur_list.prev_graf + 1;
- active_total_demerits(q) = 0;
- active_short(q) = 0;
- active_glue(q) = 0;
- node_next(active_head) = q; /* we create a cycle */
- tex_aux_set_target_to_source(properties->adjust_spacing, lmt_linebreak_state.active_width, lmt_linebreak_state.background);
- lmt_linebreak_state.passive = null;
- lmt_linebreak_state.printed_node = temp_head;
- lmt_linebreak_state.pass_number = 0;
- lmt_print_state.font_in_short_display = null_font;
- /*tex Create an active breakpoint representing the beginning of the paragraph. */
- /* lmt_linebreak_state.auto_breaking = 1; */ /* gone */
- cur_p = node_next(temp_head);
- /*tex Initialize with first (or current) |par| node. */
- if (cur_p && node_type(cur_p) == par_node) {
- node_prev(cur_p) = temp_head;
- lmt_linebreak_state.internal_penalty_interline = tex_get_local_interline_penalty(cur_p);
- lmt_linebreak_state.internal_penalty_broken = tex_get_local_broken_penalty(cur_p);
- lmt_linebreak_state.init_internal_left_box = par_box_left(cur_p);
- lmt_linebreak_state.init_internal_left_box_width = tex_get_local_left_width(cur_p);
- lmt_linebreak_state.internal_right_box = par_box_right(cur_p);
- lmt_linebreak_state.internal_right_box_width = tex_get_local_right_width(cur_p);
- lmt_linebreak_state.internal_middle_box = par_box_middle(cur_p);
- } else {
- lmt_linebreak_state.internal_penalty_interline = 0;
- lmt_linebreak_state.internal_penalty_broken = 0;
- lmt_linebreak_state.init_internal_left_box = null;
- lmt_linebreak_state.init_internal_left_box_width = 0;
- lmt_linebreak_state.internal_right_box = null;
- lmt_linebreak_state.internal_right_box_width = 0;
- lmt_linebreak_state.internal_middle_box = null;
- }
- lmt_linebreak_state.internal_left_box = lmt_linebreak_state.init_internal_left_box;
- lmt_linebreak_state.internal_left_box_width = lmt_linebreak_state.init_internal_left_box_width;
- lmt_packaging_state.previous_char_ptr = null;
- first_p = cur_p;
- /*tex
+ if (callback_id) {
+ tex_aux_initialize_show_break_node(callback_id);
+ }
+ {
+ halfword cur_p = null;
+ int pass = 0;
+ while (++pass) {
+ halfword first_p = node_next(temp_head);
+ cur_p = first_p;
+ if (lmt_linebreak_state.threshold > infinite_bad) {
+ lmt_linebreak_state.threshold = infinite_bad;
+ }
+ if (callback_id) {
+ tex_aux_start_show_break_node(callback_id, pass);
+ }
+ /*tex Create an active breakpoint representing the beginning of the paragraph. */
+ {
+ halfword initial = tex_new_node(unhyphenated_node, (quarterword) decent_fit);
+ node_next(initial) = active_head;
+ active_break_node(initial) = null;
+ active_line_number(initial) = cur_list.prev_graf + 1;
+ active_total_demerits(initial) = 0; // default
+ active_short(initial) = 0; // default
+ active_glue(initial) = 0; // default
+ // active_reserved(initial) = 0; // default
+ node_next(active_head) = initial;
+ }
+ /*tex We now have created a cycle. */
+ tex_aux_set_target_to_source(properties->adjust_spacing, lmt_linebreak_state.active_width, lmt_linebreak_state.background);
+ lmt_linebreak_state.passive = null;
+ lmt_linebreak_state.printed_node = temp_head;
+ lmt_linebreak_state.pass_number = 0;
+ lmt_print_state.font_in_short_display = null_font;
+ /*tex Create an active breakpoint representing the beginning of the paragraph. */
+ /* lmt_linebreak_state.auto_breaking = 1; */ /* gone */
+ // cur_p = node_next(temp_head);
+ /*tex Initialize with first (or current) |par| node. */
+ if (cur_p && node_type(cur_p) == par_node) {
+ node_prev(cur_p) = temp_head;
+ lmt_linebreak_state.internal_penalty_interline = tex_get_local_interline_penalty(cur_p);
+ lmt_linebreak_state.internal_penalty_broken = tex_get_local_broken_penalty(cur_p);
+ lmt_linebreak_state.init_internal_left_box = par_box_left(cur_p);
+ lmt_linebreak_state.init_internal_left_box_width = tex_get_local_left_width(cur_p);
+ lmt_linebreak_state.internal_right_box = par_box_right(cur_p);
+ lmt_linebreak_state.internal_right_box_width = tex_get_local_right_width(cur_p);
+ lmt_linebreak_state.internal_middle_box = par_box_middle(cur_p);
+ } else {
+ lmt_linebreak_state.internal_penalty_interline = 0;
+ lmt_linebreak_state.internal_penalty_broken = 0;
+ lmt_linebreak_state.init_internal_left_box = null;
+ lmt_linebreak_state.init_internal_left_box_width = 0;
+ lmt_linebreak_state.internal_right_box = null;
+ lmt_linebreak_state.internal_right_box_width = 0;
+ lmt_linebreak_state.internal_middle_box = null;
+ }
+ lmt_linebreak_state.internal_left_box = lmt_linebreak_state.init_internal_left_box;
+ lmt_linebreak_state.internal_left_box_width = lmt_linebreak_state.init_internal_left_box_width;
+ lmt_packaging_state.previous_char_ptr = null;
+ // first_p = cur_p;
+ /*tex
- To access the first node of paragraph as the first active node has |break_node = null|.
+ To access the first node of paragraph as the first active node has |break_node = null|.
- Determine legal breaks: As we move through the hlist, we need to keep the |active_width|
- array up to date, so that the badness of individual lines is readily calculated by
- |try_break|. It is convenient to use the short name |active_width [1]| for the component
- of active width that represents real width as opposed to glue.
+ Determine legal breaks: As we move through the hlist, we need to keep the |active_width|
+ array up to date, so that the badness of individual lines is readily calculated by
+ |try_break|. It is convenient to use the short name |active_width [1]| for the component
+ of active width that represents real width as opposed to glue.
- Advance |cur_p| to the node following the present string of characters. The code that
- passes over the characters of words in a paragraph is part of \TEX's inner loop, so it
- has been streamlined for speed. We use the fact that |\parfillskip| glue appears at the
- end of each paragraph; it is therefore unnecessary to check if |vlink (cur_p) = null|
- when |cur_p| is a character node.
+ Advance |cur_p| to the node following the present string of characters. The code that
+ passes over the characters of words in a paragraph is part of \TEX's inner loop, so it
+ has been streamlined for speed. We use the fact that |\parfillskip| glue appears at the
+ end of each paragraph; it is therefore unnecessary to check if |vlink (cur_p) = null|
+ when |cur_p| is a character node.
- */
- while (cur_p && (node_next(active_head) != active_head)) { /* we check the cycle */
- switch (node_type(cur_p)) {
- case glyph_node:
- lmt_linebreak_state.active_width[total_glue_amount] += tex_glyph_width_ex(cur_p);
- if (properties->adjust_spacing && tex_aux_check_expand_pars(properties->adjust_spacing_step, glyph_font(cur_p))) {
- lmt_packaging_state.previous_char_ptr = cur_p;
- lmt_linebreak_state.active_width[font_stretch_amount] += tex_char_stretch(cur_p);
- lmt_linebreak_state.active_width[font_shrink_amount] += tex_char_shrink(cur_p);
- }
- break;
- case hlist_node:
- case vlist_node:
- lmt_linebreak_state.active_width[total_glue_amount] += box_width(cur_p);
- break;
- case rule_node:
- lmt_linebreak_state.active_width[total_glue_amount] += rule_width(cur_p);
- break;
- case dir_node:
- /*tex Adjust the dir stack for the |line_break| routine. */
- line_break_dir = tex_update_dir_state(cur_p, properties->paragraph_dir);
- break;
- case par_node:
- /*tex Advance past a |par| node. */
- lmt_linebreak_state.internal_penalty_interline = tex_get_local_interline_penalty(cur_p);
- lmt_linebreak_state.internal_penalty_broken = tex_get_local_broken_penalty(cur_p);
- lmt_linebreak_state.internal_left_box = par_box_left(cur_p);
- lmt_linebreak_state.internal_left_box_width = tex_get_local_left_width(cur_p);
- lmt_linebreak_state.internal_right_box = par_box_right(cur_p);
- lmt_linebreak_state.internal_right_box_width = tex_get_local_right_width(cur_p);
- lmt_linebreak_state.internal_middle_box = par_box_middle(cur_p);
- break;
- case glue_node:
- /*tex
+ */
+ while (cur_p && (node_next(active_head) != active_head)) { /* we check the cycle */
+ switch (node_type(cur_p)) {
+ case glyph_node:
+ /* why ex here and not in add/sub disc glyphs */
+ lmt_linebreak_state.active_width[total_glue_amount] += tex_glyph_width_ex(cur_p); // ex
+ if (properties->adjust_spacing && tex_aux_check_expand_pars(properties->adjust_spacing_step, glyph_font(cur_p))) {
+ lmt_packaging_state.previous_char_ptr = cur_p;
+ lmt_linebreak_state.active_width[font_stretch_amount] += tex_char_stretch(cur_p);
+ lmt_linebreak_state.active_width[font_shrink_amount] += tex_char_shrink(cur_p);
+ }
+ break;
+ case hlist_node:
+ case vlist_node:
+ lmt_linebreak_state.active_width[total_glue_amount] += box_width(cur_p);
+ break;
+ case rule_node:
+ lmt_linebreak_state.active_width[total_glue_amount] += rule_width(cur_p);
+ break;
+ case dir_node:
+ /*tex Adjust the dir stack for the |line_break| routine. */
+ line_break_dir = tex_update_dir_state(cur_p, properties->paragraph_dir);
+ break;
+ case par_node:
+ /*tex Advance past a |par| node. */
+ lmt_linebreak_state.internal_penalty_interline = tex_get_local_interline_penalty(cur_p);
+ lmt_linebreak_state.internal_penalty_broken = tex_get_local_broken_penalty(cur_p);
+ lmt_linebreak_state.internal_left_box = par_box_left(cur_p);
+ lmt_linebreak_state.internal_left_box_width = tex_get_local_left_width(cur_p);
+ lmt_linebreak_state.internal_right_box = par_box_right(cur_p);
+ lmt_linebreak_state.internal_right_box_width = tex_get_local_right_width(cur_p);
+ lmt_linebreak_state.internal_middle_box = par_box_middle(cur_p);
+ break;
+ case glue_node:
+ /*tex
- If node |cur_p| is a legal breakpoint, call |try_break|; then update the
- active widths by including the glue in |glue_ptr(cur_p)|.
+ If node |cur_p| is a legal breakpoint, call |try_break|; then update the
+ active widths by including the glue in |glue_ptr(cur_p)|.
- When node |cur_p| is a glue node, we look at the previous to see whether or
- not a breakpoint is legal at |cur_p|, as explained above.
+ When node |cur_p| is a glue node, we look at the previous to see whether
+ or not a breakpoint is legal at |cur_p|, as explained above.
- We only break after certain nodes (see texnodes.h), a font related kern and
- a dir node when |\breakafterdirmode = 1|.
+ We only break after certain nodes (see texnodes.h), a font related kern
+ and a dir node when |\breakafterdirmode = 1|.
- */
- if (tex_has_glue_option(cur_p, glue_option_no_auto_break)) {
- /*tex Glue in math is not a valid breakpoint. */
- } else if (tex_is_par_init_glue(cur_p)) {
- /*tex Of course we don't break here. */
- } else if (tex_aux_valid_glue_break(cur_p)) {
- tex_aux_try_break(properties, 0, unhyphenated_node, first_p, cur_p);
- }
- lmt_linebreak_state.active_width[total_glue_amount] += glue_amount(cur_p);
- lmt_linebreak_state.active_width[2 + glue_stretch_order(cur_p)] += glue_stretch(cur_p);
- lmt_linebreak_state.active_width[total_shrink_amount] += tex_aux_checked_shrink(cur_p);
- break;
- case kern_node:
- switch (node_subtype(cur_p)) {
- case explicit_kern_subtype:
- case italic_kern_subtype:
- {
- /* there used to a ! is_char_node(node_next(cur_p)) test */
- halfword nxt = node_next(cur_p);
- if (nxt && node_type(nxt) == glue_node && ! tex_has_glue_option(nxt, glue_option_no_auto_break)) {
- tex_aux_try_break(properties, 0, unhyphenated_node, first_p, cur_p);
+ */
+ if (tex_has_glue_option(cur_p, glue_option_no_auto_break)) {
+ /*tex Glue in math is not a valid breakpoint, unless we permit it. */
+ } else if (tex_is_par_init_glue(cur_p)) {
+ /*tex Of course we don't break here. */
+ } else if (tex_aux_valid_glue_break(cur_p)) {
+ tex_aux_try_break(properties, tex_aux_upcoming_penalty(cur_p), unhyphenated_node, first_p, cur_p, callback_id, pass);
+ }
+ lmt_linebreak_state.active_width[total_glue_amount] += glue_amount(cur_p);
+ lmt_linebreak_state.active_width[total_stretch_amount + glue_stretch_order(cur_p)] += glue_stretch(cur_p);
+ lmt_linebreak_state.active_width[total_shrink_amount] += tex_aux_checked_shrink(cur_p);
+ break;
+ case kern_node:
+ switch (node_subtype(cur_p)) {
+ case explicit_kern_subtype:
+ case italic_kern_subtype:
+ {
+                            /* there used to be a ! is_char_node(node_next(cur_p)) test */
+ halfword nxt = node_next(cur_p);
+ if (nxt && node_type(nxt) == glue_node && ! tex_has_glue_option(nxt, glue_option_no_auto_break)) {
+ tex_aux_try_break(properties, 0, unhyphenated_node, first_p, cur_p, callback_id, pass);
+ }
}
- }
- break;
- case font_kern_subtype:
- if (properties->adjust_spacing == adjust_spacing_full) {
- lmt_linebreak_state.active_width[font_stretch_amount] += tex_kern_stretch(cur_p);
- lmt_linebreak_state.active_width[font_shrink_amount] += tex_kern_shrink(cur_p);
- }
- break;
- }
- lmt_linebreak_state.active_width[total_glue_amount] += kern_amount(cur_p);
- break;
- case disc_node:
- /*tex
-
- Try to break after a discretionary fragment, then |goto done5|. The
- following code knows that discretionary texts contain only character
- nodes, kern nodes, box nodes, and rule nodes. This branch differs a bit
- from older engines because in \LUATEX\ we already have hyphenated the list.
- This means that we need to skip automatic disc nodes. Or better, we need
- to treat discretionaries and explicit hyphens always, even in the first
- pass.
-
- We used to have |init_disc| followed by |select disc| variants where the
- |select_disc|s were handled by the leading |init_disc|. The question is: should
- we bother about select nodes? Knuth indicates in the original source that only
- a very few cases need hyphenation so the exceptional case of >2 char ligatures
- having hyphenation points in between is rare. We'd better have proper compound
- word handling. Keep in mind that these (old) init and select subtypes always
- came in isolated pairs and that they only were meant for the simple (enforced)
- hyphenation discretionaries.
-
- Therefore, this feature has been dropped from \LUAMETATEX. It not only makes
- the code simpler, it also avoids having code on board for border cases that
- even when dealt with are suboptimal. It's better to have nothing that something
- fuzzy. It also makes dealing with (intermediate) node lists easier. If I want
- something like this it should be okay for any situation.
-
- */
- if (force_check_hyphenation || lmt_linebreak_state.second_pass || (node_subtype(cur_p) != syllable_discretionary_code)) {
- halfword actual_penalty = disc_penalty(cur_p);
- halfword s = disc_pre_break_head(cur_p);
- tex_aux_reset_disc_target(properties->adjust_spacing, lmt_linebreak_state.disc_width);
- if (s) {
- tex_aux_add_to_widths(s, properties->adjust_spacing, properties->adjust_spacing_step, lmt_linebreak_state.disc_width);
- tex_aux_add_disc_source_to_target(properties->adjust_spacing, lmt_linebreak_state.active_width, lmt_linebreak_state.disc_width);
- tex_aux_try_break(properties, actual_penalty, hyphenated_node, first_p, cur_p);
- tex_aux_sub_disc_target_from_source(properties->adjust_spacing, lmt_linebreak_state.active_width, lmt_linebreak_state.disc_width);
- } else {
- /*tex trivial pre-break */
- tex_aux_try_break(properties, actual_penalty, hyphenated_node, first_p, cur_p);
+ break;
+ case font_kern_subtype:
+ if (properties->adjust_spacing == adjust_spacing_full) {
+ lmt_linebreak_state.active_width[font_stretch_amount] += tex_kern_stretch(cur_p);
+ lmt_linebreak_state.active_width[font_shrink_amount] += tex_kern_shrink(cur_p);
+ }
+ break;
}
- }
- tex_aux_add_to_widths(disc_no_break_head(cur_p), properties->adjust_spacing, properties->adjust_spacing_step, lmt_linebreak_state.active_width);
- break;
- case penalty_node:
- tex_aux_try_break(properties, penalty_amount(cur_p), unhyphenated_node, first_p, cur_p);
- break;
- case math_node:
- {
- /* there used to a ! is_char_node(node_next(cur_p)) test */
- int finishing = node_subtype(cur_p) == end_inline_math;
- // lmt_linebreak_state.auto_breaking = finishing;
- if (tex_math_glue_is_zero(cur_p) || tex_ignore_math_skip(cur_p)) {
- /*tex
- When we end up here we assume |\mathsurround| but we only check for
- a break when we're ending math. Maybe this is something we need to
- open up. The math specific penalty only kicks in when we break.
- */
- if (finishing && node_type(node_next(cur_p)) == glue_node) {
- tex_aux_try_break(properties, math_penalty(cur_p), unhyphenated_node, first_p, cur_p);
+ lmt_linebreak_state.active_width[total_glue_amount] += kern_amount(cur_p);
+ break;
+ case disc_node:
+ /*tex
+
+ Try to break after a discretionary fragment, then |goto done5|. The
+ following code knows that discretionary texts contain only character
+ nodes, kern nodes, box nodes, and rule nodes. This branch differs a bit
+ from older engines because in \LUATEX\ we already have hyphenated the list.
+ This means that we need to skip automatic disc nodes. Or better, we need
+ to treat discretionaries and explicit hyphens always, even in the first
+ pass.
+
+ We used to have |init_disc| followed by |select disc| variants where the
+ |select_disc|s were handled by the leading |init_disc|. The question is: should
+ we bother about select nodes? Knuth indicates in the original source that only
+ a very few cases need hyphenation so the exceptional case of >2 char ligatures
+ having hyphenation points in between is rare. We'd better have proper compound
+ word handling. Keep in mind that these (old) init and select subtypes always
+ came in isolated pairs and that they only were meant for the simple (enforced)
+ hyphenation discretionaries.
+
+ Therefore, this feature has been dropped from \LUAMETATEX. It not only makes
+ the code simpler, it also avoids having code on board for border cases that
+                        even when dealt with are suboptimal. It's better to have nothing than something
+ fuzzy. It also makes dealing with (intermediate) node lists easier. If I want
+ something like this it should be okay for any situation.
+
+ */
+ if (force_check_hyphenation || lmt_linebreak_state.second_pass || (node_subtype(cur_p) != syllable_discretionary_code)) {
+ halfword actual_penalty = disc_penalty(cur_p);
+ halfword pre = disc_pre_break_head(cur_p);
+ tex_aux_reset_disc_target(properties->adjust_spacing, lmt_linebreak_state.disc_width);
+ if (pre) {
+ tex_aux_add_to_widths(pre, properties->adjust_spacing, properties->adjust_spacing_step, lmt_linebreak_state.disc_width);
+ tex_aux_add_disc_source_to_target(properties->adjust_spacing, lmt_linebreak_state.active_width, lmt_linebreak_state.disc_width);
+ tex_aux_try_break(properties, actual_penalty, hyphenated_node, first_p, cur_p, callback_id, pass);
+ tex_aux_sub_disc_target_from_source(properties->adjust_spacing, lmt_linebreak_state.active_width, lmt_linebreak_state.disc_width);
+ } else {
+ /*tex trivial pre-break */
+ tex_aux_try_break(properties, actual_penalty, hyphenated_node, first_p, cur_p, callback_id, pass);
}
- lmt_linebreak_state.active_width[total_glue_amount] += math_surround(cur_p);
- } else {
- /*tex
- This one does quite some testing, is that still needed?
- */
- if (finishing && tex_aux_valid_glue_break(cur_p)) {
- tex_aux_try_break(properties, math_penalty(cur_p), unhyphenated_node, first_p, cur_p);
+ }
+ tex_aux_add_to_widths(disc_no_break_head(cur_p), properties->adjust_spacing, properties->adjust_spacing_step, lmt_linebreak_state.active_width);
+ break;
+ case penalty_node:
+ tex_aux_try_break(properties, penalty_amount(cur_p), unhyphenated_node, first_p, cur_p, callback_id, pass);
+ break;
+ case math_node:
+ {
+                        /* there used to be a ! is_char_node(node_next(cur_p)) test */
+ int finishing = node_subtype(cur_p) == end_inline_math;
+ // lmt_linebreak_state.auto_breaking = finishing;
+ if (tex_math_glue_is_zero(cur_p) || tex_ignore_math_skip(cur_p)) {
+ /*tex
+ When we end up here we assume |\mathsurround| but we only check for
+ a break when we're ending math. Maybe this is something we need to
+ open up. The math specific penalty only kicks in when we break.
+ */
+ if (finishing && node_type(node_next(cur_p)) == glue_node) {
+ tex_aux_try_break(properties, math_penalty(cur_p), unhyphenated_node, first_p, cur_p, callback_id, pass);
+ }
+ lmt_linebreak_state.active_width[total_glue_amount] += math_surround(cur_p);
+ } else {
+ /*tex
+ This one does quite some testing, is that still needed?
+ */
+ if (finishing && tex_aux_valid_glue_break(cur_p)) {
+ tex_aux_try_break(properties, math_penalty(cur_p), unhyphenated_node, first_p, cur_p, callback_id, pass);
+ }
+ lmt_linebreak_state.active_width[total_glue_amount] += math_amount(cur_p);
+ lmt_linebreak_state.active_width[total_stretch_amount + math_stretch_order(cur_p)] += math_stretch(cur_p);
+ lmt_linebreak_state.active_width[total_shrink_amount] += tex_aux_checked_shrink(cur_p);
}
- lmt_linebreak_state.active_width[total_glue_amount] += math_amount(cur_p);
- lmt_linebreak_state.active_width[2 + math_stretch_order(cur_p)] += math_stretch(cur_p);
- lmt_linebreak_state.active_width[total_shrink_amount] += tex_aux_checked_shrink(cur_p);
}
- }
- break;
- case boundary_node:
- case whatsit_node:
- case mark_node:
- case insert_node:
- case adjust_node:
- /*tex Advance past these nodes in the |line_break| loop. */
- break;
- default:
- tex_formatted_error("parbuilder", "weird node %d in paragraph", node_type(cur_p));
- }
- cur_p = node_next(cur_p);
- while (! cur_p && nest_index > 0) {
- cur_p = nest_stack[--nest_index];
+ break;
+ case boundary_node:
+ case whatsit_node:
+ case mark_node:
+ case insert_node:
+ case adjust_node:
+ /*tex Advance past these nodes in the |line_break| loop. */
+ break;
+ default:
+ tex_formatted_error("parbuilder", "weird node %d in paragraph", node_type(cur_p));
+ }
+ cur_p = node_next(cur_p);
}
- }
- if (! cur_p) {
- /*tex
-
- Try the final line break at the end of the paragraph, and |goto done| if the desired
- breakpoints have been found.
-
- The forced line break at the paragraph's end will reduce the list of breakpoints so
- that all active nodes represent breaks at |cur_p = null|. On the first pass, we
- insist on finding an active node that has the correct \quote {looseness.} On the
- final pass, there will be at least one active node, and we will match the desired
- looseness as well as we can.
+ if (! cur_p) {
+ /*tex
- The global variable |best_bet| will be set to the active node for the best way to
- break the paragraph, and a few other variables are used to help determine what is
- best.
+ Try the final line break at the end of the paragraph, and |goto done| if the desired
+ breakpoints have been found.
- */
- tex_aux_try_break(properties, eject_penalty, hyphenated_node, first_p, cur_p);
- if (node_next(active_head) != active_head) {
- /*tex Find an active node with fewest demerits. */
- r = node_next(active_head);
- lmt_linebreak_state.fewest_demerits = awful_bad;
- do {
- if ((node_type(r) != delta_node) && (active_total_demerits(r) < lmt_linebreak_state.fewest_demerits)) {
- lmt_linebreak_state.fewest_demerits = active_total_demerits(r);
- lmt_linebreak_state.best_bet = r;
- }
- r = node_next(r);
- } while (r != active_head);
- lmt_linebreak_state.best_line = active_line_number(lmt_linebreak_state.best_bet);
- /*tex Find an active node with fewest demerits. */
- if (properties->looseness == 0) {
- goto DONE;
- } else {
- /*tex
+ The forced line break at the paragraph's end will reduce the list of breakpoints so
+ that all active nodes represent breaks at |cur_p = null|. On the first pass, we
+ insist on finding an active node that has the correct \quote {looseness.} On the
+ final pass, there will be at least one active node, and we will match the desired
+ looseness as well as we can.
- Find the best active node for the desired looseness. The adjustment for a
- desired looseness is a slightly more complicated version of the loop just
- considered. Note that if a paragraph is broken into segments by displayed
- equations, each segment will be subject to the looseness calculation,
- independently of the other segments.
+ The global variable |best_bet| will be set to the active node for the best way to
+ break the paragraph, and a few other variables are used to help determine what is
+ best.
- */
- r = node_next(active_head); // can be local
- lmt_linebreak_state.actual_looseness = 0;
+ */
+ tex_aux_try_break(properties, eject_penalty, hyphenated_node, first_p, cur_p, callback_id, pass);
+ if (node_next(active_head) != active_head) {
+ /*tex Find an active node with fewest demerits. */
+ halfword r = node_next(active_head);
+ lmt_linebreak_state.fewest_demerits = awful_bad;
do {
- if (node_type(r) != delta_node) {
- lmt_linebreak_state.line_difference = active_line_number(r) - lmt_linebreak_state.best_line;
- if (((lmt_linebreak_state.line_difference < lmt_linebreak_state.actual_looseness) && (properties->looseness <= lmt_linebreak_state.line_difference))
- || ((lmt_linebreak_state.line_difference > lmt_linebreak_state.actual_looseness) && (properties->looseness >= lmt_linebreak_state.line_difference))) {
- lmt_linebreak_state.best_bet = r;
- lmt_linebreak_state.actual_looseness = lmt_linebreak_state.line_difference;
- lmt_linebreak_state.fewest_demerits = active_total_demerits(r);
- } else if ((lmt_linebreak_state.line_difference == lmt_linebreak_state.actual_looseness) && (active_total_demerits(r) < lmt_linebreak_state.fewest_demerits)) {
- lmt_linebreak_state.best_bet = r;
- lmt_linebreak_state.fewest_demerits = active_total_demerits(r);
- }
+ if ((node_type(r) != delta_node) && (active_total_demerits(r) < lmt_linebreak_state.fewest_demerits)) {
+ lmt_linebreak_state.fewest_demerits = active_total_demerits(r);
+ lmt_linebreak_state.best_bet = r;
}
r = node_next(r);
} while (r != active_head);
lmt_linebreak_state.best_line = active_line_number(lmt_linebreak_state.best_bet);
- /*tex
- Find the best active node for the desired looseness.
- */
- if ((lmt_linebreak_state.actual_looseness == properties->looseness) || lmt_linebreak_state.final_pass) {
+ /*tex Find an active node with fewest demerits. */
+ if (properties->looseness == 0) {
goto DONE;
+ } else {
+ /*tex
+
+ Find the best active node for the desired looseness. The adjustment for a
+ desired looseness is a slightly more complicated version of the loop just
+ considered. Note that if a paragraph is broken into segments by displayed
+ equations, each segment will be subject to the looseness calculation,
+ independently of the other segments.
+
+ */
+ r = node_next(active_head); // can be local
+ lmt_linebreak_state.actual_looseness = 0;
+ do {
+ if (node_type(r) != delta_node) {
+ lmt_linebreak_state.line_difference = active_line_number(r) - lmt_linebreak_state.best_line;
+ if (((lmt_linebreak_state.line_difference < lmt_linebreak_state.actual_looseness) && (properties->looseness <= lmt_linebreak_state.line_difference))
+ || ((lmt_linebreak_state.line_difference > lmt_linebreak_state.actual_looseness) && (properties->looseness >= lmt_linebreak_state.line_difference))) {
+ lmt_linebreak_state.best_bet = r;
+ lmt_linebreak_state.actual_looseness = lmt_linebreak_state.line_difference;
+ lmt_linebreak_state.fewest_demerits = active_total_demerits(r);
+ } else if ((lmt_linebreak_state.line_difference == lmt_linebreak_state.actual_looseness) && (active_total_demerits(r) < lmt_linebreak_state.fewest_demerits)) {
+ lmt_linebreak_state.best_bet = r;
+ lmt_linebreak_state.fewest_demerits = active_total_demerits(r);
+ }
+ }
+ r = node_next(r);
+ } while (r != active_head);
+ lmt_linebreak_state.best_line = active_line_number(lmt_linebreak_state.best_bet);
+ /*tex
+ Find the best active node for the desired looseness.
+ */
+ if ((lmt_linebreak_state.actual_looseness == properties->looseness) || lmt_linebreak_state.final_pass) {
+ goto DONE;
+ }
}
}
+ } else {
+ /*tex So we have cycled: |node_next(active_head) == active_head|. */
+ }
+ /*tex Clean up the memory by removing the break nodes. */
+ cur_p = tex_aux_clean_up_the_memory(cur_p);
+ if (! lmt_linebreak_state.second_pass) {
+ if (properties->tracing_paragraphs > 0) {
+ tex_print_format("%l[linebreak: second pass]"); /* @secondpass */;
+ }
+ lmt_linebreak_state.threshold = properties->tolerance;
+ lmt_linebreak_state.second_pass = 1;
+ lmt_linebreak_state.final_pass = (properties->emergency_stretch <= 0);
+ } else {
+ /*tex If at first you do not succeed, then: */
+ if (properties->tracing_paragraphs > 0) {
+ tex_print_format("%l[linebreak: emergency pass]"); /* @emergencypass */
+ }
+ lmt_linebreak_state.background[total_stretch_amount] += properties->emergency_stretch;
+ lmt_linebreak_state.final_pass = 1;
}
- }
- /*tex Clean up the memory by removing the break nodes. */
- cur_p = tex_aux_clean_up_the_memory(cur_p);
- if (! lmt_linebreak_state.second_pass) {
- if (properties->tracing_paragraphs > 0) {
- tex_print_str("%l[linebreak: second pass]"); /* @secondpass */;
+ if (callback_id) {
+ tex_aux_stop_show_break_node(callback_id);
}
- lmt_linebreak_state.threshold = properties->tolerance;
- lmt_linebreak_state.second_pass = 1;
- lmt_linebreak_state.final_pass = (properties->emergency_stretch <= 0);
- } else {
- /*tex If at first you do not succeed, then: */
- if (properties->tracing_paragraphs > 0) {
- tex_print_str("%l[linebreak: emergency pass]"); /* @emergencypass */
+ }
+ DONE:
+ if (properties->tracing_paragraphs > 0) {
+ tex_end_diagnostic();
+ /*tex
+ This is a bit weird, as only here: |normalize_selector()| while we have diagnostics
+ all over the place.
+ */
+ }
+ if (lmt_linebreak_state.do_last_line_fit) {
+ /*tex
+ Adjust the final line of the paragraph; here we either reset |do_last_line_fit| or
+ adjust the |par_fill_skip| glue.
+ */
+ if (active_short(lmt_linebreak_state.best_bet) == 0) {
+ lmt_linebreak_state.do_last_line_fit = 0;
+ } else {
+ glue_amount(lmt_linebreak_state.last_line_fill) += (active_short(lmt_linebreak_state.best_bet) - active_glue(lmt_linebreak_state.best_bet));
+ glue_stretch(lmt_linebreak_state.last_line_fill) = 0;
}
- lmt_linebreak_state.background[total_stretch_amount] += properties->emergency_stretch;
- lmt_linebreak_state.final_pass = 1;
}
- }
- DONE:
- if (properties->tracing_paragraphs > 0) {
- tex_end_diagnostic();
/*tex
- This is a bit weird, as only here: |normalize_selector()| while we have diagnostics
- all over the place.
+        Break the paragraph at the chosen breakpoints. Once the best sequence of breakpoints has been found
+ (hurray), we call on the procedure |post_line_break| to finish the remainder of the work.
+ By introducing this subprocedure, we are able to keep |line_break| from getting extremely
+ long. The first thing |ext_post_line_break| does is reset |dir_ptr|.
*/
+ tex_flush_node_list(lmt_linebreak_state.dir_ptr);
+ lmt_linebreak_state.dir_ptr = null;
+ /*tex Here we still have a temp node as head. */
+ tex_aux_post_line_break(properties, line_break_dir, callback_id);
+ /*tex Clean up memory by removing the break nodes (maybe: |tex_flush_node_list(cur_p);|). */
+ tex_aux_clean_up_the_memory(cur_p);
}
- if (lmt_linebreak_state.do_last_line_fit) {
- /*tex
- Adjust the final line of the paragraph; here we either reset |do_last_line_fit| or
- adjust the |par_fill_skip| glue.
- */
- if (active_short(lmt_linebreak_state.best_bet) == 0) {
- lmt_linebreak_state.do_last_line_fit = 0;
- } else {
- glue_amount(lmt_linebreak_state.last_line_fill) += (active_short(lmt_linebreak_state.best_bet) - active_glue(lmt_linebreak_state.best_bet));
- glue_stretch(lmt_linebreak_state.last_line_fill) = 0;
- }
+ if (callback_id) {
+ tex_aux_wrapup_show_break_node(callback_id);
}
- /*tex
- Break the paragraph at the chosen. Once the best sequence of breakpoints has been found
- (hurray), we call on the procedure |post_line_break| to finish the remainder of the work.
- By introducing this subprocedure, we are able to keep |line_break| from getting extremely
- long. The first thing |ext_post_line_break| does is reset |dir_ptr|.
- */
- tex_flush_node_list(lmt_linebreak_state.dir_ptr);
- lmt_linebreak_state.dir_ptr = null;
- /*tex Here we still have a temp node as head. */
- tex_aux_post_line_break(properties, line_break_dir);
- /*tex Clean up the memory by removing the break nodes. */
- cur_p = tex_aux_clean_up_the_memory(cur_p);
}
void tex_get_linebreak_info(int *f, int *a)
@@ -2698,7 +2855,7 @@ static void tex_aux_trace_penalty(const char *what, int line, int index, halfwor
}
}
-static void tex_aux_post_line_break(const line_break_properties *properties, halfword line_break_dir)
+static void tex_aux_post_line_break(const line_break_properties *properties, halfword line_break_dir, int callback_id)
{
/*tex temporary registers for list manipulation */
halfword q, r;
@@ -2733,6 +2890,9 @@ static void tex_aux_post_line_break(const line_break_properties *properties, hal
and having |next_break| fields. Node |r| is the passive node being moved from stack to
stack.
*/
+ if (callback_id) {
+ tex_aux_collect_show_break_node(callback_id);
+ }
q = active_break_node(lmt_linebreak_state.best_bet);
do {
r = q;
@@ -2740,6 +2900,13 @@ static void tex_aux_post_line_break(const line_break_properties *properties, hal
passive_next_break(r) = cur_p;
cur_p = r;
} while (q);
+ if (callback_id) {
+ halfword p = cur_p;
+ while (p) {
+ tex_aux_list_break_node(p, callback_id);
+ p = passive_next_break(p);
+ }
+ }
/*tex prevgraf + 1 */
cur_line = cur_list.prev_graf + 1;
do {
@@ -3310,6 +3477,9 @@ static void tex_aux_post_line_break(const line_break_properties *properties, hal
}
/*tex Call the packaging subroutine, setting |just_box| to the justified box. */
node_subtype(lmt_linebreak_state.just_box) = line_list;
+ if (callback_id) {
+ tex_aux_line_show_break_node(callback_id);
+ }
/*tex Pending content (callback). */
if (node_next(contribute_head)) {
if (! lmt_page_builder_state.output_active) {
@@ -3504,6 +3674,10 @@ static void tex_aux_post_line_break(const line_break_properties *properties, hal
while (1) {
q = node_next(r);
if (node_type(q) == math_node) {
+ if (node_subtype(q) == begin_inline_math) {
+ /*tex We keep it for tracing. */
+ break;
+ }
/*tex begin mathskip code */
math_surround(q) = 0 ;
tex_reset_math_glue_to_zero(q);
@@ -3517,7 +3691,7 @@ static void tex_aux_post_line_break(const line_break_properties *properties, hal
/*tex Keep it. Can be tricky after a |\break| with no follow up (loops). */
break;
} else if (node_type(q) == par_node && node_subtype(q) == local_box_par_subtype) {
- /*tex weird, in the middle somewhere .. these local penalties do this */
+ /*tex Weird, in the middle somewhere .. these local penalties do this. */
break; /* if not we leak, so maybe this needs more testing */
} else if (non_discardable(q)) {
break;
diff --git a/source/luametatex/source/tex/texlinebreak.h b/source/luametatex/source/tex/texlinebreak.h
index 789101999..850d98da0 100644
--- a/source/luametatex/source/tex/texlinebreak.h
+++ b/source/luametatex/source/tex/texlinebreak.h
@@ -79,10 +79,10 @@ typedef struct linebreak_state_info {
halfword pass_number;
/* int auto_breaking; */ /* is gone */
/* int math_level; */ /* was never used */
- scaled active_width[10];
- scaled background[10];
- scaled break_width[10];
- scaled disc_width[10];
+ scaled active_width[n_of_glue_amounts];
+ scaled background[n_of_glue_amounts];
+ scaled break_width[n_of_glue_amounts];
+ scaled disc_width[n_of_glue_amounts];
scaled fill_width[4];
halfword internal_penalty_interline;
halfword internal_penalty_broken;
diff --git a/source/luametatex/source/tex/texlocalboxes.c b/source/luametatex/source/tex/texlocalboxes.c
index 0def018d4..1bcc25bc0 100644
--- a/source/luametatex/source/tex/texlocalboxes.c
+++ b/source/luametatex/source/tex/texlocalboxes.c
@@ -295,19 +295,23 @@ void tex_set_local_right_width(halfword p, scaled width)
halfword tex_get_local_interline_penalty(halfword p)
{
return par_penalty_interline(p);
+ // return par_inter_line_penalty(p);
}
halfword tex_get_local_broken_penalty(halfword p)
{
return par_penalty_broken(p);
+ // return par_broken_penalty(p);
}
void tex_set_local_interline_penalty(halfword p, halfword penalty)
{
par_penalty_interline(p) = penalty;
+ // par_inter_line_penalty(p) = penalty;
}
void tex_set_local_broken_penalty(halfword p, halfword penalty)
{
par_penalty_broken(p) = penalty;
+ // par_broken_penalty(p) = penalty;
}
diff --git a/source/luametatex/source/tex/texmaincontrol.c b/source/luametatex/source/tex/texmaincontrol.c
index 3989dfff0..0489b67ac 100644
--- a/source/luametatex/source/tex/texmaincontrol.c
+++ b/source/luametatex/source/tex/texmaincontrol.c
@@ -1161,9 +1161,9 @@ static void tex_aux_run_par_boundary(void) {
{
halfword n = tex_scan_int(0, NULL);
if (lmt_nest_state.nest_data.ptr == 0 && ! lmt_page_builder_state.output_active) {
- halfword n = tex_new_node(boundary_node, (quarterword) cur_chr);
- boundary_data(n) = n;
- tex_tail_append(n);
+ halfword boundary = tex_new_node(boundary_node, page_boundary);
+ boundary_data(boundary) = n;
+ tex_tail_append(boundary);
if (cur_list.mode == vmode) {
if (! lmt_page_builder_state.output_active) {
tex_page_boundary_message("callback triggered", n);
@@ -1182,9 +1182,9 @@ static void tex_aux_run_par_boundary(void) {
/*tex Not yet, first I need a proper use case. */ /*
case par_boundary:
{
- halfword n = tex_new_node(boundary_node, (quarterword) cur_chr);
- boundary_data(n) = tex_scan_int(0, NULL);
- tex_tail_append(n);
+ halfword boundary = tex_new_node(boundary_node, par_boundary);
+ boundary_data(boundary) = tex_scan_int(0, NULL);
+ tex_tail_append(boundary);
break;
}
*/
@@ -1196,20 +1196,20 @@ static void tex_aux_run_par_boundary(void) {
}
static void tex_aux_run_text_boundary(void) {
- halfword n = tex_new_node(boundary_node, (quarterword) cur_chr);
+ halfword boundary = tex_new_node(boundary_node, (quarterword) cur_chr);
switch (cur_chr) {
case user_boundary:
case protrusion_boundary:
- boundary_data(n) = tex_scan_int(0, NULL);
+ boundary_data(boundary) = tex_scan_int(0, NULL);
break;
case page_boundary:
- /* or maybe force vmode */
+ /*tex Maybe we should force vmode? For now we just ignore the value. */
tex_scan_int(0, NULL);
break;
default:
break;
}
- tex_tail_append(n);
+ tex_tail_append(boundary);
}
static void tex_aux_run_math_boundary(void) {
@@ -1223,6 +1223,7 @@ static void tex_aux_run_math_boundary(void) {
}
case protrusion_boundary:
case page_boundary:
+ /*tex We just ignore the values. */
tex_scan_int(0, NULL);
break;
}
@@ -1248,7 +1249,7 @@ static void tex_aux_run_paragraph_end_vmode(void) {
/*tex We could pass the group and context here if needed and set some parameter. */
-int tex_wrapped_up_paragraph(int context) {
+int tex_wrapped_up_paragraph(int context, int final) {
halfword par = tex_find_par_par(cur_list.head);
lmt_main_control_state.last_par_context = context;
if (par) {
@@ -1263,14 +1264,14 @@ int tex_wrapped_up_paragraph(int context) {
tex_delete_token_reference(eop);
done = 1;
}
- // if (end_of_par_par) {
- // if (! done) {
- // back_input(cur_tok);
- // }
- // begin_token_list(end_of_par_par, end_paragraph_text);
- // update_tex_end_of_par(null);
- // done = 1;
- // }
+ if (final && end_of_group_par) {
+ if (! done) {
+ tex_back_input(cur_tok);
+ }
+ tex_begin_token_list(end_of_group_par, end_paragraph_text);
+ update_tex_end_of_group(null);
+ done = 1;
+ }
return done;
} else {
return 0;
@@ -1278,7 +1279,7 @@ int tex_wrapped_up_paragraph(int context) {
}
static void tex_aux_run_paragraph_end_hmode(void) {
- if (! tex_wrapped_up_paragraph(normal_par_context)) {
+ if (! tex_wrapped_up_paragraph(normal_par_context, 0)) {
if (lmt_input_state.align_state < 0) {
/*tex This tries to recover from an alignment that didn't end properly. */
tex_off_save();
@@ -1585,7 +1586,7 @@ int tex_main_control(void)
return lmt_main_state.run_state == initializing_state && cur_chr == dump_code;
}
/*tex
- Give diagnostic information, if requested When a new token has just been fetched at
+ Give diagnostic information, if requested. When a new token has just been fetched at
|big_switch|, we have an ideal place to monitor \TEX's activity.
*/
if (tracing_commands_par > 0) {
@@ -2499,7 +2500,8 @@ inline static void tex_aux_finish_adjusted_hbox(void)
inline static void tex_aux_finish_vbox(void)
{
- if (! tex_wrapped_up_paragraph(vbox_par_context)) {
+
+ if (! tex_wrapped_up_paragraph(vbox_par_context, 1)) {
tex_end_paragraph(vbox_group, vbox_par_context);
tex_package(vbox_code);
}
@@ -2507,7 +2509,7 @@ inline static void tex_aux_finish_vbox(void)
inline static void tex_aux_finish_vtop(void)
{
- if (! tex_wrapped_up_paragraph(vtop_par_context)) {
+ if (! tex_wrapped_up_paragraph(vtop_par_context, 1)) {
tex_end_paragraph(vtop_group, vtop_par_context);
tex_package(vtop_code);
}
@@ -2515,7 +2517,7 @@ inline static void tex_aux_finish_vtop(void)
inline static void tex_aux_finish_dbox(void)
{
- if (! tex_wrapped_up_paragraph(dbox_par_context)) {
+ if (! tex_wrapped_up_paragraph(dbox_par_context, 1)) {
tex_end_paragraph(dbox_group, dbox_par_context);
tex_package(dbox_code);
}
@@ -2975,7 +2977,7 @@ static void tex_aux_run_kern(void)
{
halfword code = cur_chr;
switch (code) {
- /* not yet enabled and maybe it never will be */
+ /*tex Finally enabled: */
case h_kern_code:
if (cur_mode == vmode) {
tex_back_input(token_val(kern_cmd, normal_kern_code));
@@ -3072,13 +3074,35 @@ void tex_end_paragraph(int group, int context)
static void tex_aux_run_penalty(void)
{
- halfword value = tex_scan_int(0, NULL);
- tex_tail_append(tex_new_penalty_node(value, user_penalty_subtype));
- if (cur_list.mode == vmode) {
- if (! lmt_page_builder_state.output_active) {
- lmt_page_filter_callback(penalty_page_context, 0);
+ halfword code = cur_chr;
+ switch (code) {
+ /*tex Finally enabled: */
+ case h_penalty_code:
+ if (cur_mode == vmode) {
+ tex_back_input(token_val(penalty_cmd, normal_penalty_code));
+ tex_back_input(token_val(begin_paragraph_cmd, quitvmode_par_code));
+ return;
+ } else {
+ break;
+ }
+ case v_penalty_code:
+ if (cur_mode == hmode) {
+ tex_back_input(token_val(penalty_cmd, normal_penalty_code));
+ tex_back_input(token_val(end_paragraph_cmd, normal_end_paragraph_code));
+ return;
+ } else {
+ break;
+ }
+ }
+ {
+ halfword value = tex_scan_int(0, NULL);
+ tex_tail_append(tex_new_penalty_node(value, user_penalty_subtype));
+ if (cur_list.mode == vmode) {
+ if (! lmt_page_builder_state.output_active) {
+ lmt_page_filter_callback(penalty_page_context, 0);
+ }
+ tex_build_page();
}
- tex_build_page();
}
}
@@ -4259,7 +4283,7 @@ static void tex_aux_set_shorthand_def(int a, int force)
switch (code) {
case char_def_code:
{
- halfword chr = tex_scan_char_number(0); /* maybe 1 */
+ halfword chr = tex_scan_char_number(0);
tex_define_again(a, p, char_given_cmd, chr);
break;
}
@@ -4377,16 +4401,6 @@ static void tex_aux_set_shorthand_def(int a, int force)
tex_define(a, p, fontspec_cmd, v);
}
break;
- /*
- case string_def_code:
- {
- halfword t = scan_toks_expand(0, NULL);
- halfword s = tokens_to_string(t);
- define(a, p, string_cmd, s - cs_offset_value);
- flush_list(t);
- break;
- }
- */
default:
tex_confusion("shorthand definition");
break;
@@ -6403,8 +6417,8 @@ inline static void tex_aux_big_switch(int mode, int cmd)
switch (cmd) {
case arithmic_cmd:
- case internal_int_cmd :
- case register_int_cmd :
+ case internal_int_cmd:
+ case register_int_cmd:
case internal_attribute_cmd:
case register_attribute_cmd:
case internal_posit_cmd:
@@ -6692,6 +6706,10 @@ void tex_initialize_variables(void)
math_end_class_par = math_end_class;
math_left_class_par = unset_noad_class;
math_right_class_par = unset_noad_class;
+ pre_inline_penalty_par = max_integer;
+ post_inline_penalty_par = max_integer;
+ pre_short_inline_penalty_par = max_integer;
+ post_short_inline_penalty_par = max_integer;
variable_family_par = -1,
ignore_depth_criterium_par = ignore_depth;
aux_get_date_and_time(&time_par, &day_par, &month_par, &year_par, &lmt_engine_state.utc_time);
diff --git a/source/luametatex/source/tex/texmaincontrol.h b/source/luametatex/source/tex/texmaincontrol.h
index 558db148f..f5c79bfdc 100644
--- a/source/luametatex/source/tex/texmaincontrol.h
+++ b/source/luametatex/source/tex/texmaincontrol.h
@@ -42,7 +42,7 @@ extern int tex_main_control (void);
extern void tex_normal_paragraph (int context);
extern void tex_begin_paragraph (int doindent, int context);
extern void tex_end_paragraph (int group, int context);
-extern int tex_wrapped_up_paragraph (int context);
+extern int tex_wrapped_up_paragraph (int context, int final);
extern void tex_insert_paragraph_token (void);
diff --git a/source/luametatex/source/tex/texmath.c b/source/luametatex/source/tex/texmath.c
index 00e67942c..a4ec71e5d 100644
--- a/source/luametatex/source/tex/texmath.c
+++ b/source/luametatex/source/tex/texmath.c
@@ -300,6 +300,16 @@ int tex_math_has_class_option(halfword cls, int option)
return (value & option) == option;
}
+int tex_math_has_class_parent(halfword cls)
+{
+ halfword value = count_parameter(first_math_options_code + cls);
+ if (value == no_class_options) {
+ unsigned parent = (unsigned) count_parameter(first_math_parent_code + cls);
+ return (parent >> 16) & 0xFF;
+ }
+ return 0;
+}
+
static void tex_aux_unsave_math(void)
{
tex_unsave();
@@ -2363,11 +2373,23 @@ static void tex_aux_math_math_component(halfword target, int append)
}
break;
case 's': case 'S':
- if (tex_scan_mandate_keyword("source", 1)) {
- noad_source(target) = tex_scan_int(0, NULL);
+ switch (tex_scan_character("ioIO", 0, 0, 0)) {
+ case 'i': case 'I':
+ if (tex_scan_mandate_keyword("single", 2)) {
+ noad_options(target) |= noad_option_single;
+ }
+ break;
+ case 'o': case 'O':
+ if (tex_scan_mandate_keyword("source", 2)) {
+ noad_source(target) = tex_scan_int(0, NULL);
+ }
+ break;
+ default:
+ tex_aux_show_keyword_error("single|source");
+ goto DONE;
}
break;
- case 't': case 'T':
+ case 't': case 'T':
if (tex_scan_mandate_keyword("textfont", 1)) {
usetextfont = math_atom_text_font_option;
}
@@ -2928,6 +2950,7 @@ void tex_run_math_accent(void)
halfword code = cur_chr;
halfword accent = tex_new_node(accent_noad, bothflexible_accent_subtype);
quarterword subtype = ordinary_noad_subtype;
+ halfword mathclass = accent_noad_subtype;
halfword attrlist = null;
if (cur_cmd == accent_cmd) {
tex_handle_error(
@@ -2945,15 +2968,48 @@ void tex_run_math_accent(void)
case math_uaccent_code:
/*tex |\Umathaccent| */
while (1) {
- switch (tex_scan_character("abcnsftokABCNSFTOK", 0, 1, 0)) {
+ switch (tex_scan_character("abcensftokABCENSFTOK", 0, 1, 0)) {
case 'a': case 'A':
- if (tex_scan_mandate_keyword("attr", 1)) {
- attrlist = tex_scan_attribute(attrlist);
+ switch (tex_scan_character("txTX", 0, 0, 0)) {
+ case 't': case 'T':
+ if (tex_scan_mandate_keyword("attr", 2)) {
+ attrlist = tex_scan_attribute(attrlist);
+ }
+ break;
+ // case 'x': case 'X':
+ // if (tex_scan_mandate_keyword("axis", 2)) {
+ // noad_options(accent) |= noad_option_axis;
+ // }
+ // break;
+ default:
+ // tex_aux_show_keyword_error("attr|axis");
+ tex_aux_show_keyword_error("attr");
+ goto DONE;
}
break;
case 'c': case 'C':
- if (tex_scan_mandate_keyword("center", 1)) {
- noad_options(accent) |= noad_option_center;
+ switch (tex_scan_character("elEL", 0, 0, 0)) {
+ case 'e': case 'E':
+ if (tex_scan_mandate_keyword("center", 2)) {
+ noad_options(accent) |= noad_option_center;
+ }
+ break;
+ case 'l': case 'L':
+ if (tex_scan_mandate_keyword("class", 2)) {
+ halfword c = (quarterword) tex_scan_math_class_number(0);
+ if (valid_math_class_code(c)) {
+ mathclass = c;
+ }
+ }
+ break;
+ default:
+ tex_aux_show_keyword_error("center|class");
+ goto DONE;
+ }
+ break;
+ case 'e': case 'E':
+ if (tex_scan_mandate_keyword("exact", 1)) {
+ noad_options(accent) |= noad_option_exact;
}
break;
case 's': case 'S':
@@ -3104,6 +3160,7 @@ void tex_run_math_accent(void)
noad_nucleus(accent) = n;
tex_aux_scan_math(n, tex_math_style_variant(cur_list.math_style, math_parameter_accent_variant), 0, 0, 0, 0, unset_noad_class, unset_noad_class);
}
+ set_noad_main_class(accent, mathclass);
}
/*tex
@@ -4704,6 +4761,34 @@ static void tex_aux_finish_displayed_math(int atleft, halfword eqnumber, halfwor
*/
+static inline int tex_aux_class_from_glyph(halfword n) {
+ return node_subtype(n) - (node_subtype(n) > glyph_math_extra_subtype ? glyph_math_extra_subtype : glyph_math_ordinary_subtype);
+}
+
+static int tex_aux_short_math(halfword m)
+{
+ // tex_show_node_list(m,10000,10000);
+ if (m) {
+ /* kern[] glyph[subtype -> class] vlist[scripts] kern[] */
+ if (node_type(m) == kern_node) {
+ m = node_next(m);
+ }
+ if (m && node_type(m) == glyph_node && tex_math_has_class_option(tex_aux_class_from_glyph(m), short_inline_class_option)) {
+ m = node_next(m);
+ } else {
+ return 0;
+ }
+ if (m && node_type(m) == vlist_node && node_subtype(m) == math_scripts_list) {
+ m = node_next(m);
+ }
+ if (m && node_type(m) == kern_node) {
+ m = node_next(m);
+ }
+ return ! m;
+ }
+ return 0;
+}
+
void tex_run_math_shift(void)
{
switch (cur_group) {
@@ -4719,7 +4804,7 @@ void tex_run_math_shift(void)
int mode = cur_list.mode;
int mathmode = cur_list.math_mode;
/*tex this pops the nest, the formula */
- halfword p = tex_aux_finish_math_list(null);
+ halfword mathlist = tex_aux_finish_math_list(null);
int mathleft = cur_list.math_begin;
int mathright = cur_list.math_end;
if (cur_cmd == math_shift_cs_cmd) {
@@ -4745,7 +4830,7 @@ void tex_run_math_shift(void)
tex_aux_check_display_math_end();
break;
}
- tex_run_mlist_to_hlist(p, 0, text_style, unset_noad_class, unset_noad_class);
+ tex_run_mlist_to_hlist(mathlist, 0, text_style, unset_noad_class, unset_noad_class);
eqnumber = tex_hpack(node_next(temp_head), 0, packing_additional, direction_unknown, holding_none_option);
attach_current_attribute_list(eqnumber);
tex_aux_unsave_math();
@@ -4754,7 +4839,7 @@ void tex_run_math_shift(void)
if (saved_type(saved_equation_number_item_location) == equation_number_location_save_type) {
atleft = saved_value(saved_equation_number_item_location) == left_location_code;
mode = cur_list.mode;
- p = tex_aux_finish_math_list(null);
+ mathlist = tex_aux_finish_math_list(null);
} else {
tex_confusion("after math");
}
@@ -4769,7 +4854,9 @@ void tex_run_math_shift(void)
the space above that display.
*/
- halfword math = tex_new_node(math_node, begin_inline_math);
+ halfword beginmath = tex_new_node(math_node, begin_inline_math);
+ halfword endmath = tex_new_node(math_node, end_inline_math);
+ halfword shortmath = 0;
if (mathmode) {
switch (cur_cmd) {
case math_shift_cs_cmd:
@@ -4784,68 +4871,81 @@ void tex_run_math_shift(void)
} else if (cur_cmd == math_shift_cs_cmd) {
tex_aux_check_inline_math_end();
}
- tex_tail_append(math);
- math_penalty(math) = pre_inline_penalty_par;
+ tex_tail_append(beginmath);
+ if (pre_inline_penalty_par != max_integer) {
+ math_penalty(beginmath) = pre_inline_penalty_par;
+ }
/*tex begin mathskip code */
switch (math_skip_mode_par) {
case math_skip_surround_when_zero:
if (! tex_glue_is_zero(math_skip_par)) {
- tex_copy_glue_values(math, math_skip_par);
+ tex_copy_glue_values(beginmath, math_skip_par);
} else {
- math_surround(math) = math_surround_par;
+ math_surround(beginmath) = math_surround_par;
}
break ;
case math_skip_always_left:
case math_skip_always_both:
case math_skip_only_when_skip:
- tex_copy_glue_values(math, math_skip_par);
+ tex_copy_glue_values(beginmath, math_skip_par);
break ;
case math_skip_always_right:
case math_skip_ignore:
break ;
case math_skip_always_surround:
default:
- math_surround(math) = math_surround_par;
+ math_surround(beginmath) = math_surround_par;
break;
}
/*tex end mathskip code */
if (cur_list.math_dir) {
tex_tail_append(tex_new_dir(normal_dir_subtype, math_direction_par));
}
- tex_run_mlist_to_hlist(p, cur_list.mode > nomode, is_valid_math_style(cur_list.math_main_style) ? cur_list.math_main_style : text_style, cur_list.math_begin, cur_list.math_end);
+ tex_run_mlist_to_hlist(mathlist, cur_list.mode > nomode, is_valid_math_style(cur_list.math_main_style) ? cur_list.math_main_style : text_style, cur_list.math_begin, cur_list.math_end);
+ shortmath = tex_aux_short_math(node_next(temp_head));
tex_try_couple_nodes(cur_list.tail, node_next(temp_head));
cur_list.tail = tex_tail_of_node_list(cur_list.tail);
if (cur_list.math_dir) {
tex_tail_append(tex_new_dir(cancel_dir_subtype, math_direction_par));
}
cur_list.math_dir = 0;
- math = tex_new_node(math_node, end_inline_math);
- tex_tail_append(math);
- math_penalty(math) = post_inline_penalty_par;
+ tex_tail_append(endmath);
+ /* */
+ if (post_inline_penalty_par != max_integer) {
+ math_penalty(endmath) = post_inline_penalty_par;
+ }
/*tex begin mathskip code */
switch (math_skip_mode_par) {
case math_skip_surround_when_zero :
if (! tex_glue_is_zero(math_skip_par)) {
- tex_copy_glue_values(math, math_skip_par);
- math_surround(math) = 0;
+ tex_copy_glue_values(endmath, math_skip_par);
+ math_surround(endmath) = 0;
} else {
- math_surround(math) = math_surround_par;
+ math_surround(endmath) = math_surround_par;
}
break;
case math_skip_always_right:
case math_skip_always_both:
case math_skip_only_when_skip:
- tex_copy_glue_values(math, math_skip_par);
+ tex_copy_glue_values(endmath, math_skip_par);
break;
case math_skip_always_left:
case math_skip_ignore:
break;
case math_skip_always_surround:
default:
- math_surround(math) = math_surround_par;
+ math_surround(endmath) = math_surround_par;
break;
}
/*tex end mathskip code */
+ if (shortmath) {
+ if (pre_short_inline_penalty_par != max_integer) {
+ math_penalty(beginmath) = pre_short_inline_penalty_par;
+ }
+ if (post_short_inline_penalty_par != max_integer) {
+ math_penalty(endmath) = post_short_inline_penalty_par;
+ }
+ }
cur_list.space_factor = default_space_factor;
mathleft = cur_list.math_begin;
mathright = cur_list.math_end;
@@ -4858,7 +4958,7 @@ void tex_run_math_shift(void)
tex_aux_check_display_math_end();
}
}
- tex_run_mlist_to_hlist(p, 0, display_style, cur_list.math_begin, cur_list.math_end);
+ tex_run_mlist_to_hlist(mathlist, 0, display_style, cur_list.math_begin, cur_list.math_end);
mathleft = cur_list.math_begin;
mathright = cur_list.math_end;
tex_aux_finish_displayed_math(atleft, eqnumber, node_next(temp_head));
diff --git a/source/luametatex/source/tex/texmath.h b/source/luametatex/source/tex/texmath.h
index ba02e8373..65f706a3c 100644
--- a/source/luametatex/source/tex/texmath.h
+++ b/source/luametatex/source/tex/texmath.h
@@ -272,10 +272,12 @@ typedef enum math_class_options {
auto_inject_class_option = 0x0100000,
remove_italic_correction_class_option = 0x0200000,
operator_italic_correction_class_option = 0x0400000,
+ short_inline_class_option = 0x0800000,
no_class_options = 0xF000000,
} math_class_options;
extern int tex_math_has_class_option(halfword cls, int option);
+extern int tex_math_has_class_parent(halfword cls);
typedef enum math_atom_font_options {
math_atom_no_font_option = 0,
diff --git a/source/luametatex/source/tex/texmlist.c b/source/luametatex/source/tex/texmlist.c
index c5613d90a..69de1c8c6 100644
--- a/source/luametatex/source/tex/texmlist.c
+++ b/source/luametatex/source/tex/texmlist.c
@@ -542,12 +542,10 @@ inline static int tex_aux_checked_left_kern_fnt_chr(halfword fnt, halfword chr,
halfword hastop = (state & prime_script_state) || (state & post_super_script_state);
halfword hasbot = state & post_sub_script_state;
if (hastop && tex_math_has_class_option(subtype, left_top_kern_class_option)) {
-// top = tex_char_top_left_kern_from_font(fnt, chr);
-top = tex_aux_math_x_size_scaled(fnt, tex_char_top_left_kern_from_font(fnt, chr), size);
+ top = tex_aux_math_x_size_scaled(fnt, tex_char_top_left_kern_from_font(fnt, chr), size);
}
if (hasbot && tex_math_has_class_option(subtype, left_bottom_kern_class_option)) {
-// bot = tex_char_bottom_left_kern_from_font(fnt, chr);
-bot = tex_aux_math_x_size_scaled(fnt, tex_char_bottom_left_kern_from_font(fnt, chr), size);
+ bot = tex_aux_math_x_size_scaled(fnt, tex_char_bottom_left_kern_from_font(fnt, chr), size);
}
if (hastop && hasbot) {
return top > bot ? top : bot;
@@ -574,12 +572,10 @@ inline static int tex_aux_checked_right_kern_fnt_chr(halfword fnt, halfword chr,
halfword hastop = state & pre_super_script_state;
halfword hasbot = state & pre_sub_script_state;
if (hastop && tex_math_has_class_option(subtype, right_top_kern_class_option)) {
-// top = tex_char_top_right_kern_from_font(fnt, chr);
-top = tex_aux_math_x_size_scaled(fnt, tex_char_top_right_kern_from_font(fnt, chr), size);
+ top = tex_aux_math_x_size_scaled(fnt, tex_char_top_right_kern_from_font(fnt, chr), size);
}
if (hasbot && tex_math_has_class_option(subtype, right_bottom_kern_class_option)) {
-// bot = tex_char_bottom_right_kern_from_font(fnt, chr);
-bot = tex_aux_math_x_size_scaled(fnt, tex_char_bottom_right_kern_from_font(fnt, chr), size);
+ bot = tex_aux_math_x_size_scaled(fnt, tex_char_bottom_right_kern_from_font(fnt, chr), size);
}
if (hastop && hasbot) {
return top < bot ? bot : top;
@@ -783,7 +779,7 @@ static halfword tex_aux_fraction_rule(scaled width, scaled height, halfword att,
if (! rule) {
if (math_rules_mode_par) {
rule = tex_new_rule_node(ruletype);
- rule_data(rule) = tex_fam_fnt(fam, size);
+ rule_data(rule) = tex_fam_fnt(fam, size); // we have font/fam/chr fields, why not use these
} else {
rule = tex_new_rule_node(normal_rule_subtype);
}
@@ -3154,7 +3150,14 @@ static void tex_aux_do_make_math_accent(halfword target, halfword accentfnt, hal
}
} else { /* if (flags & overlay_accent_code) { */
/*tex Center the accent vertically around base: */
- delta = tex_half_scaled(box_total(accent) + box_total(base));
+ if (has_noad_option_exact(target)) {
+ delta = box_height(base) + box_depth(accent);
+ } else {
+ delta = tex_half_scaled(box_total(accent) + box_total(base));
+ }
+ // if (has_noad_option_axis(target)) {
+ // delta -= tex_aux_math_axis(size);
+ // }
}
if (accenttotal) {
*accenttotal = box_total(accent);
@@ -3175,7 +3178,11 @@ static void tex_aux_do_make_math_accent(halfword target, halfword accentfnt, hal
baseheight = box_height(base);
}
/*tex The top accents of both characters are aligned. */
- {
+ if (flags & overlay_accent_code) {
+        /* We ignore overshoot here, at least for now. */
+ box_shift_amount(accent) = tex_half_scaled(basewidth - box_width(accent));
+ box_width(accent) = 0; /* in gyre zero anyway */
+ } else {
halfword accentwidth = box_width(accent);
if (accentwidth > basewidth && has_noad_option_nooverflow(target)) {
/*tex
@@ -3201,9 +3208,9 @@ static void tex_aux_do_make_math_accent(halfword target, halfword accentfnt, hal
} else {
/*tex When we scale we center. */
if (flags & top_accent_code) {
- anchor = tex_char_unchecked_top_anchor_from_font(accentfnt, accentchr); /* no bot accent key */
+ anchor = tex_char_unchecked_top_anchor_from_font(accentfnt, accentchr);
} else if (flags & bot_accent_code) {
- anchor = tex_char_unchecked_bottom_anchor_from_font(accentfnt, accentchr); /* no bot accent key */
+ anchor = tex_char_unchecked_bottom_anchor_from_font(accentfnt, accentchr);
} else {
anchor = INT_MIN;
}
@@ -5070,12 +5077,18 @@ static void tex_aux_make_scripts(halfword target, halfword kernel, scaled italic
shift_down = 0;
break;
default:
- /*tex Used for optimizing accents. */
- kernelsize.ht -= supdrop;
- /*tex These parameters are only applied in an assembly (and often some 0.5 .. 1.5 pt on 12pt). */
- prime_up = kernelsize.ht - tex_get_math_y_parameter_default(style, math_parameter_prime_shift_drop, 0);
- shift_up = kernelsize.ht - tex_get_math_y_parameter_checked(style, math_parameter_superscript_shift_drop);
- shift_down = kernelsize.dp + tex_get_math_y_parameter_checked(style, math_parameter_subscript_shift_drop);
+ if (has_noad_option_single(target)) {
+ prime_up = 0;
+ shift_up = 0;
+ shift_down = 0;
+ } else {
+ /*tex Used for optimizing accents. */
+ kernelsize.ht -= supdrop;
+ /*tex These parameters are only applied in an assembly (and often some 0.5 .. 1.5 pt on 12pt). */
+ prime_up = kernelsize.ht - tex_get_math_y_parameter_default(style, math_parameter_prime_shift_drop, 0);
+ shift_up = kernelsize.ht - tex_get_math_y_parameter_checked(style, math_parameter_superscript_shift_drop);
+ shift_down = kernelsize.dp + tex_get_math_y_parameter_checked(style, math_parameter_subscript_shift_drop);
+ }
break;
}
/*tex
@@ -6965,7 +6978,8 @@ static void tex_mlist_to_hlist_finalize_list(mliststate *state)
break;
case accent_noad:
current_type = simple_noad; /*tex Same kind of fields. */
- current_subtype = accent_noad_subtype;
+ // current_subtype = accent_noad_subtype;
+ current_subtype = get_noad_main_class(current);
current_left_slack = noad_left_slack(current);
current_right_slack = noad_right_slack(current);
break;
diff --git a/source/luametatex/source/tex/texnodes.c b/source/luametatex/source/tex/texnodes.c
index 63cf9e4c3..39dedb97f 100644
--- a/source/luametatex/source/tex/texnodes.c
+++ b/source/luametatex/source/tex/texnodes.c
@@ -157,12 +157,14 @@ void lmt_nodelib_initialize(void) {
set_value_entry_key(subtypes_glue, g_leaders, gleaders)
set_value_entry_key(subtypes_glue, u_leaders, uleaders)
- subtypes_boundary = lmt_aux_allocate_value_info(word_boundary);
+ subtypes_boundary = lmt_aux_allocate_value_info(par_boundary);
set_value_entry_key(subtypes_boundary, cancel_boundary, cancel)
set_value_entry_key(subtypes_boundary, user_boundary, user)
set_value_entry_key(subtypes_boundary, protrusion_boundary, protrusion)
set_value_entry_key(subtypes_boundary, word_boundary, word)
+ set_value_entry_key(subtypes_boundary, page_boundary, page)
+ set_value_entry_key(subtypes_boundary, par_boundary, par)
subtypes_penalty = lmt_aux_allocate_value_info(equation_number_penalty_subtype);
@@ -2724,11 +2726,38 @@ void tex_show_node_list(halfword p, int threshold, int max)
if (rule_depth(p)) {
tex_print_format(", depth %R", rule_depth(p));
}
- if (rule_left(p)) {
- tex_print_format(", left / top %R", rule_left(p));
- }
- if (rule_right(p)) {
- tex_print_format(", right / bottom %R", rule_right(p));
+ switch (node_subtype(p)) {
+ case virtual_rule_subtype:
+ if (rule_virtual_width(p)) {
+ tex_print_format(", virtual width %R", rule_virtual_width(p));
+ }
+ if (rule_virtual_height(p)) {
+ tex_print_format(", virtual height %R", rule_virtual_height(p));
+ }
+ if (rule_virtual_depth(p)) {
+ tex_print_format(", virtual depth %R", rule_virtual_depth(p));
+ }
+ break;
+ case strut_rule_subtype:
+ if (rule_strut_font(p)) {
+ if (rule_strut_font(p) >= rule_font_fam_offset) {
+ tex_print_format(", family %i", rule_strut_font(p) - rule_font_fam_offset);
+ } else {
+ tex_print_format(", font %F", rule_strut_font(p) < 0 ? 0 : rule_strut_font(p));
+ }
+ }
+ if (rule_strut_character(p)) {
+ tex_print_format(", character %U", rule_strut_character(p));
+ }
+ /* fall through */
+ default:
+ if (rule_left(p)) {
+ tex_print_format(", left / top %R", rule_left(p));
+ }
+ if (rule_right(p)) {
+ tex_print_format(", right / bottom %R", rule_right(p));
+ }
+ break;
}
if (rule_x_offset(p)) {
tex_print_format(", xoffset %R", rule_x_offset(p));
@@ -2736,15 +2765,8 @@ void tex_show_node_list(halfword p, int threshold, int max)
if (rule_y_offset(p)) {
tex_print_format(", yoffset %R", rule_y_offset(p));
}
- if (rule_font(p)) {
- if (rule_font(p) >= rule_font_fam_offset) {
- tex_print_format(", family %i", rule_font(p) - rule_font_fam_offset);
- } else {
- tex_print_format(", font %F", rule_font(p) < 0 ? 0 : rule_font(p));
- }
- }
- if (rule_character(p)) {
- tex_print_format(", character %U", rule_character(p));
+ if (rule_data(p)) {
+ tex_print_format(", data %R", rule_data(p));
}
break;
case insert_node:
diff --git a/source/luametatex/source/tex/texnodes.h b/source/luametatex/source/tex/texnodes.h
index 7fa050428..9e7d3ef13 100644
--- a/source/luametatex/source/tex/texnodes.h
+++ b/source/luametatex/source/tex/texnodes.h
@@ -140,7 +140,9 @@ typedef enum node_types {
/*tex These two are active nodes. */
unhyphenated_node,
hyphenated_node,
+ /*tex This one can also be in the active list. */
delta_node,
+ /*tex While this is an indirect one carrying data. */
passive_node,
} node_types;
@@ -631,9 +633,9 @@ inline static int tex_is_par_init_glue(halfword n)
typedef enum kern_subtypes {
font_kern_subtype,
- explicit_kern_subtype, /*tex |subtype| of kern nodes from |\kern| and |\/| */
+ explicit_kern_subtype, /*tex |subtype| of kern nodes from |\kern| */
accent_kern_subtype, /*tex |subtype| of kern nodes from accents */
- italic_kern_subtype,
+ italic_kern_subtype, /*tex |subtype| of kern nodes from |\/| */
left_margin_kern_subtype,
right_margin_kern_subtype,
explicit_math_kern_subtype,
@@ -1001,10 +1003,18 @@ typedef enum rule_codes {
# define rule_y_offset(a) vinfo(a,3)
# define rule_height(a) vlink(a,4)
# define rule_data(a) vinfo(a,4)
-# define rule_left(a) vinfo(a,5)
-# define rule_right(a) vlink(a,5)
-# define rule_font(a) vinfo(a,6)
-# define rule_character(a) vlink(a,6)
+# define rule_left(a) vinfo(a,5) /* depends on subtype */
+# define rule_right(a) vlink(a,5) /* depends on subtype */
+# define rule_extra_1(a) vinfo(a,6) /* depends on subtype */
+# define rule_extra_2(a) vlink(a,6) /* depends on subtype */
+
+# define rule_strut_font rule_extra_1
+# define rule_strut_character rule_extra_2
+
+# define rule_virtual_width rule_left
+# define rule_virtual_height rule_right
+# define rule_virtual_depth rule_extra_1
+# define rule_virtual_unused rule_extra_2
# define rule_total(a) (rule_height(a) + rule_depth(a))
@@ -1160,6 +1170,8 @@ typedef enum glyph_subtypes {
glyph_math_accent_subtype,
glyph_math_fenced_subtype,
glyph_math_ghost_subtype,
+ /* bogus subtype */
+ glyph_math_vcenter_subtype,
/* extra math, user classes, set but anonymous */
glyph_math_extra_subtype = 31,
} glyph_subtypes;
@@ -1793,6 +1805,7 @@ typedef enum noad_options {
# define noad_option_center (uint64_t) 0x04000000000
# define noad_option_scale (uint64_t) 0x08000000000
# define noad_option_keep_base (uint64_t) 0x10000000000
+# define noad_option_single (uint64_t) 0x20000000000
# define has_option(a,b) (((a) & (b)) == (b))
# define unset_option(a,b) ((a) & ~(b))
@@ -1856,6 +1869,7 @@ inline static int has_noad_no_script_option(halfword n, halfword option)
# define has_noad_option_auto_base(a) (has_option(noad_options(a), noad_option_auto_base))
# define has_noad_option_scale(a) (has_option(noad_options(a), noad_option_scale))
# define has_noad_option_keep_base(a) (has_option(noad_options(a), noad_option_keep_base))
+# define has_noad_option_single(a) (has_option(noad_options(a), noad_option_single))
/*tex
In the meantime the codes and subtypes are in sync. The variable component does not really
@@ -2118,7 +2132,7 @@ typedef enum math_kernel_options {
typedef enum boundary_subtypes {
cancel_boundary,
user_boundary,
- protrusion_boundary,
+ protrusion_boundary, /* 1=left, 2=right, 3=both */
word_boundary,
page_boundary,
par_boundary,
@@ -2283,7 +2297,7 @@ static int par_category_to_codes[] = {
/*tex
Todo: make the fields 6+ into a par_state node so that local box ones can be
small. Also, penalty and broken fields now are duplicate. Do we need to keep
- these?
+ these?
*/
# define par_node_size 28
@@ -2312,12 +2326,12 @@ static int par_category_to_codes[] = {
# define par_looseness(a) vinfo(a,13)
# define par_last_line_fit(a) vlink(a,13)
# define par_line_penalty(a) vinfo(a,14)
-# define par_inter_line_penalty(a) vlink(a,14)
+# define par_inter_line_penalty(a) vlink(a,14) /* */
# define par_club_penalty(a) vinfo(a,15)
# define par_widow_penalty(a) vlink(a,15)
# define par_display_widow_penalty(a) vinfo(a,16)
# define par_orphan_penalty(a) vlink(a,16)
-# define par_broken_penalty(a) vinfo(a,17)
+# define par_broken_penalty(a) vinfo(a,17) /* */
# define par_adj_demerits(a) vlink(a,17)
# define par_double_hyphen_demerits(a) vinfo(a,18)
# define par_final_hyphen_demerits(a) vlink(a,18)
@@ -2338,7 +2352,15 @@ static int par_category_to_codes[] = {
# define par_shaping_penalties_mode(a) vinfo(a,26)
# define par_shaping_penalty(a) vlink(a,26)
# define par_par_init_left_skip(a) vlink(a,27)
-# define par_par_init_right_skip(a) vinfo(a,27)
+# define par_par_init_right_skip(a) vinfo(a,27)
+
+/*
+ At some point we will have this (array with double values), depends on the outcome of an
+ experiment but I want to reserve this. We then also patch |texlocalboxes.c| line 295+.
+*/
+
+// define par_lousyness(a) vinfo(a,2) /* par_penalty_interline */
+// define par_reserved(a) vlink(a,2) /* par_penalty_broken */
typedef enum par_subtypes {
vmode_par_par_subtype,
@@ -2403,15 +2425,19 @@ inline static int tex_par_to_be_set (halfword state, halfword what) { re
spot.
*/
-/* is vinfo(a,2) used? it not we can have fitness there and hyphenated/unyphenates as subtype */
+/*tex
+ We can use vinfo(a,2) for fitness instead the subtype field. But then we also need to set
+ it explicitly because now that happens in the allocator.
+*/
# define active_node_size 4 /*tex |hyphenated_node| or |unhyphenated_node| */
# define active_fitness node_subtype /*tex |very_loose_fit..tight_fit| on final line for this break */
# define active_break_node(a) vlink(a,1) /*tex pointer to the corresponding passive node */
# define active_line_number(a) vinfo(a,1) /*tex line that begins at this breakpoint */
# define active_total_demerits(a) vlink(a,2) /*tex the quantity that \TEX\ minimizes */
-# define active_short(a) vinfo(a,3) /*tex |shortfall| of this line */
+# define active_reserved(a) vinfo(a,2)
# define active_glue(a) vlink(a,3) /*tex corresponding glue stretch or shrink */
+# define active_short(a) vinfo(a,3) /*tex |shortfall| of this line */
# define passive_node_size 7
# define passive_cur_break(a) vlink(a,1) /*tex in passive node, points to position of this breakpoint */
@@ -2670,6 +2696,11 @@ typedef enum glue_amounts {
total_shrink_amount = 7, // 2 //
font_stretch_amount = 8, // 8 //
font_shrink_amount = 9, // 9 //
+ /* */
+ max_height_amount = 10,
+ max_depth_amount = 11,
+ /* */
+ n_of_glue_amounts = 12,
} glue_amounts;
# define min_glue_order normal_glue_order
diff --git a/source/luametatex/source/tex/texpackaging.c b/source/luametatex/source/tex/texpackaging.c
index ad1db455c..dbc569246 100644
--- a/source/luametatex/source/tex/texpackaging.c
+++ b/source/luametatex/source/tex/texpackaging.c
@@ -641,7 +641,7 @@ scaled tex_right_marginkern(halfword p)
/*tex
Character protrusion is something we inherited from \PDFTEX\ and the next helper calculates
- the extend.
+ the extent. Is this |last_*_char| logic still valid?
*/
@@ -1488,40 +1488,40 @@ halfword tex_hpack(halfword p, scaled w, int m, singleword pack_direction, int r
box_glue_sign(r) = normal_glue_sign;
box_glue_set(r) = 0.0;
}
- if ((lmt_packaging_state.total_shrink[o] < -x) && (o == normal_glue_order) && (box_list(r))) {
- int overshoot = -x - lmt_packaging_state.total_shrink[normal_glue_order];
- lmt_packaging_state.last_badness = 1000000;
- lmt_packaging_state.last_overshoot = overshoot;
- /*tex Use the maximum shrinkage */
- box_glue_set(r) = 1.0;
- /*tex Report an overfull hbox and |goto common_ending|, if this box is sufficiently bad. */
- if ((overshoot > hfuzz_par) || (hbadness_par < 100)) {
- int callback_id = lmt_callback_defined(hpack_quality_callback);
- halfword rule = null;
- if (callback_id > 0) {
- lmt_run_callback(lmt_lua_state.lua_instance, callback_id, "SdNddS->N",
- "overfull",
- overshoot,
- r,
- abs(lmt_packaging_state.pack_begin_line),
- lmt_input_state.input_line,
- tex_current_input_file_name(),
- &rule);
- } else if (q && overfull_rule_par > 0) {
- rule = tex_new_rule_node(normal_rule_subtype);
- rule_width(rule) = overfull_rule_par;
- }
- if (rule && rule != r) {
- tex_aux_append_diagnostic_rule(r, rule);
- }
- if (callback_id == 0) {
- tex_print_nlp();
- tex_print_format("%l[package: overfull \\hbox (%D too wide)", overshoot, pt_unit);
- goto COMMON_ENDING;
+ if (o == normal_glue_order && box_list(r)) {
+ if (lmt_packaging_state.total_shrink[o] < -x) {
+ int overshoot = -x - lmt_packaging_state.total_shrink[normal_glue_order];
+ lmt_packaging_state.last_badness = 1000000;
+ lmt_packaging_state.last_overshoot = overshoot;
+ /*tex Use the maximum shrinkage */
+ box_glue_set(r) = 1.0;
+ /*tex Report an overfull hbox and |goto common_ending|, if this box is sufficiently bad. */
+ if ((overshoot > hfuzz_par) || (hbadness_par < 100)) {
+ int callback_id = lmt_callback_defined(hpack_quality_callback);
+ halfword rule = null;
+ if (callback_id > 0) {
+ lmt_run_callback(lmt_lua_state.lua_instance, callback_id, "SdNddS->N",
+ "overfull",
+ overshoot,
+ r,
+ abs(lmt_packaging_state.pack_begin_line),
+ lmt_input_state.input_line,
+ tex_current_input_file_name(),
+ &rule);
+ } else if (q && overfull_rule_par > 0) {
+ rule = tex_new_rule_node(normal_rule_subtype);
+ rule_width(rule) = overfull_rule_par;
+ }
+ if (rule && rule != r) {
+ tex_aux_append_diagnostic_rule(r, rule);
+ }
+ if (callback_id == 0) {
+ tex_print_nlp();
+ tex_print_format("%l[package: overfull \\hbox (%D too wide)", overshoot, pt_unit);
+ goto COMMON_ENDING;
+ }
}
- }
- } else if (o == normal_glue_order) {
- if (box_list(r)) {
+ } else {
/*tex Report a tight hbox and |goto common_ending|, if this box is sufficiently bad. */
lmt_packaging_state.last_badness = tex_badness(-x, lmt_packaging_state.total_shrink[normal_glue_order]);
if (lmt_packaging_state.last_badness > hbadness_par) {
@@ -2232,33 +2232,33 @@ halfword tex_vpack(halfword p, scaled h, int m, scaled l, singleword pack_direct
box_glue_sign(r) = normal_glue_sign;
box_glue_set(r) = 0.0;
}
- if ((lmt_packaging_state.total_shrink[o] < -x) && (o == normal_glue_order) && (box_list(r))) {
- int overshoot = -x - lmt_packaging_state.total_shrink[normal_glue_order];
- lmt_packaging_state.last_badness = 1000000;
- lmt_packaging_state.last_overshoot = overshoot;
- /*tex Use the maximum shrinkage */
- box_glue_set(r) = 1.0;
- /*tex Report an overfull vbox and |goto common_ending|, if this box is sufficiently bad. */
- if ((overshoot > vfuzz_par) || (vbadness_par < 100)) {
- int callback_id = lmt_callback_defined(vpack_quality_callback);
- if (callback_id > 0) {
- lmt_run_callback(lmt_lua_state.lua_instance, callback_id, "SdNddS->",
- "overfull",
- overshoot,
- r,
- abs(lmt_packaging_state.pack_begin_line),
- lmt_input_state.input_line,
- tex_current_input_file_name()
- );
- goto EXIT;
- } else {
- tex_print_nlp();
- tex_print_format("%l[package: overfull \\vbox (%D too high)", - x - lmt_packaging_state.total_shrink[normal_glue_order], pt_unit);
- goto COMMON_ENDING;
+ if (o == normal_glue_order && box_list(r)) {
+ if (lmt_packaging_state.total_shrink[o] < -x) {
+ int overshoot = -x - lmt_packaging_state.total_shrink[normal_glue_order];
+ lmt_packaging_state.last_badness = 1000000;
+ lmt_packaging_state.last_overshoot = overshoot;
+ /*tex Use the maximum shrinkage */
+ box_glue_set(r) = 1.0;
+ /*tex Report an overfull vbox and |goto common_ending|, if this box is sufficiently bad. */
+ if ((overshoot > vfuzz_par) || (vbadness_par < 100)) {
+ int callback_id = lmt_callback_defined(vpack_quality_callback);
+ if (callback_id > 0) {
+ lmt_run_callback(lmt_lua_state.lua_instance, callback_id, "SdNddS->",
+ "overfull",
+ overshoot,
+ r,
+ abs(lmt_packaging_state.pack_begin_line),
+ lmt_input_state.input_line,
+ tex_current_input_file_name()
+ );
+ goto EXIT;
+ } else {
+ tex_print_nlp();
+ tex_print_format("%l[package: overfull \\vbox (%D too high)", - x - lmt_packaging_state.total_shrink[normal_glue_order], pt_unit);
+ goto COMMON_ENDING;
+ }
}
- }
- } else if (o == normal_glue_order) {
- if (box_list(r)) {
+ } else {
/*tex Report a tight vbox and |goto common_ending|, if this box is sufficiently bad. */
lmt_packaging_state.last_badness = tex_badness(-x, lmt_packaging_state.total_shrink[normal_glue_order]);
if (lmt_packaging_state.last_badness > vbadness_par) {
@@ -2382,7 +2382,7 @@ void tex_run_vcenter(void)
void tex_finish_vcenter_group(void)
{
- if (! tex_wrapped_up_paragraph(vcenter_par_context)) {
+ if (! tex_wrapped_up_paragraph(vcenter_par_context, 1)) {
halfword p;
tex_end_paragraph(vcenter_group, vcenter_par_context);
tex_package(vbox_code); /* todo: vcenter_code */
diff --git a/source/luametatex/source/tex/texprimitive.c b/source/luametatex/source/tex/texprimitive.c
index 027f62d23..54ef9b1b1 100644
--- a/source/luametatex/source/tex/texprimitive.c
+++ b/source/luametatex/source/tex/texprimitive.c
@@ -899,12 +899,6 @@ void tex_print_cmd_chr(singleword cmd, halfword chr)
/*tex Kind of special. */
tex_print_str_esc("notexpanded");
break;
- /*
- case string_cmd:
- print_str("string:->");
- print(cs_offset_value + chr);
- break;
- */
case internal_box_reference_cmd:
tex_print_str_esc("hiddenlocalbox");
break;
diff --git a/source/luametatex/source/tex/texprinting.c b/source/luametatex/source/tex/texprinting.c
index 9e502fbdf..518f1cf43 100644
--- a/source/luametatex/source/tex/texprinting.c
+++ b/source/luametatex/source/tex/texprinting.c
@@ -223,6 +223,8 @@ void tex_print_char(int s)
*/
+/* no_print terminal | logfile | terminal_and_logfile | pseudo | new_string | luabuffer */
+
static void tex_aux_uprint(int s)
{
/*tex We're not sure about this so it's disabled for now! */
@@ -235,7 +237,6 @@ static void tex_aux_uprint(int s)
*/
if (s == new_line_char_par && lmt_print_state.selector < pseudo_selector_code) {
tex_print_ln();
- return;
} else if (s <= 0x7F) {
tex_print_char(s);
} else if (s <= 0x7FF) {
diff --git a/source/luametatex/source/tex/texrules.c b/source/luametatex/source/tex/texrules.c
index db993585b..560ca73df 100644
--- a/source/luametatex/source/tex/texrules.c
+++ b/source/luametatex/source/tex/texrules.c
@@ -67,25 +67,41 @@ halfword tex_aux_scan_rule_spec(rule_types type, halfword code)
}
break;
case 'l': case 'L':
- if (tex_scan_mandate_keyword("left", 1)) {
- rule_left(rule) = tex_scan_dimen(0, 0, 0, 0, NULL);
+ if (node_subtype(rule) != virtual_rule_subtype) {
+ if (tex_scan_mandate_keyword("left", 1)) {
+ rule_left(rule) = tex_scan_dimen(0, 0, 0, 0, NULL);
+ }
+ break;
+ } else {
+ goto DONE;
}
- break;
case 'r': case 'R':
- if (tex_scan_mandate_keyword("right", 1)) {
- rule_right(rule) = tex_scan_dimen(0, 0, 0, 0, NULL);
+ if (node_subtype(rule) != virtual_rule_subtype) {
+ if (tex_scan_mandate_keyword("right", 1)) {
+ rule_right(rule) = tex_scan_dimen(0, 0, 0, 0, NULL);
+ }
+ break;
+ } else {
+ goto DONE;
}
- break;
case 't': case 'T': /* just because it's nicer */
- if (tex_scan_mandate_keyword("top", 1)) {
- rule_left(rule) = tex_scan_dimen(0, 0, 0, 0, NULL);
+ if (node_subtype(rule) != virtual_rule_subtype) {
+ if (tex_scan_mandate_keyword("top", 1)) {
+ rule_left(rule) = tex_scan_dimen(0, 0, 0, 0, NULL);
+ }
+ break;
+ } else {
+ goto DONE;
}
- break;
case 'b': case 'B': /* just because it's nicer */
- if (tex_scan_mandate_keyword("bottom", 1)) {
- rule_right(rule) = tex_scan_dimen(0, 0, 0, 0, NULL);
+ if (node_subtype(rule) != virtual_rule_subtype) {
+ if (tex_scan_mandate_keyword("bottom", 1)) {
+ rule_right(rule) = tex_scan_dimen(0, 0, 0, 0, NULL);
+ }
+ break;
+ } else {
+ goto DONE;
}
- break;
case 'x': case 'X':
if (tex_scan_mandate_keyword("xoffset", 1)) {
rule_x_offset(rule) = tex_scan_dimen(0, 0, 0, 0, NULL);
@@ -97,44 +113,56 @@ halfword tex_aux_scan_rule_spec(rule_types type, halfword code)
}
break;
case 'f': case 'F':
- switch (tex_scan_character("aoAO", 0, 0, 0)) {
- case 'o': case 'O':
- if (tex_scan_mandate_keyword("font", 2)) {
- tex_set_rule_font(rule, tex_scan_font_identifier(NULL));
- }
- break;
- case 'a': case 'A':
- if (tex_scan_mandate_keyword("fam", 2)) {
- tex_set_rule_family(rule, tex_scan_math_family_number());
- }
- break;
- default:
- tex_aux_show_keyword_error("font|fam");
- goto DONE;
+ if (node_subtype(rule) != virtual_rule_subtype) {
+ switch (tex_scan_character("aoAO", 0, 0, 0)) {
+ case 'o': case 'O':
+ if (tex_scan_mandate_keyword("font", 2)) {
+ tex_set_rule_font(rule, tex_scan_font_identifier(NULL));
+ }
+ break;
+ case 'a': case 'A':
+ if (tex_scan_mandate_keyword("fam", 2)) {
+ tex_set_rule_family(rule, tex_scan_math_family_number());
+ }
+ break;
+ default:
+ tex_aux_show_keyword_error("font|fam");
+ goto DONE;
+ }
+ break;
+ } else {
+ goto DONE;
}
- break;
case 'c': case 'C':
- if (tex_scan_mandate_keyword("char", 1)) {
- rule_character(rule) = tex_scan_char_number(0);
+ if (node_subtype(rule) != virtual_rule_subtype) {
+ if (tex_scan_mandate_keyword("char", 1)) {
+ rule_strut_character(rule) = tex_scan_char_number(0);
+ }
+ break;
+ } else {
+ goto DONE;
}
- break;
default:
goto DONE;
}
}
DONE:
node_attr(rule) = attr;
- if (type == v_rule_type && code == strut_rule_code) {
- tex_aux_check_text_strut_rule(rule, text_style);
- }
- if (code == virtual_rule_code) {
- rule_data(rule) = rule_width(rule);
- rule_left(rule) = rule_height(rule);
- rule_right(rule) = rule_depth(rule);
- rule_width(rule) = 0;
- rule_height(rule) = 0;
- rule_depth(rule) = 0;
- node_subtype(rule) = virtual_rule_subtype;
+ switch (code) {
+ case strut_rule_code:
+ if (type == v_rule_type) {
+ tex_aux_check_text_strut_rule(rule, text_style);
+ }
+ break;
+ case virtual_rule_code:
+ rule_virtual_width(rule) = rule_width(rule);
+ rule_virtual_height(rule) = rule_height(rule);
+ rule_virtual_depth(rule) = rule_depth(rule);
+ rule_width(rule) = 0;
+ rule_height(rule) = 0;
+ rule_depth(rule) = 0;
+ node_subtype(rule) = virtual_rule_subtype;
+ break;
}
return rule;
}
@@ -163,7 +191,7 @@ void tex_aux_check_math_strut_rule(halfword rule, halfword style)
scaled dp = rule_depth(rule);
if (ht == null_flag || dp == null_flag) {
halfword fnt = tex_get_rule_font(rule, style);
- halfword chr = rule_character(rule);
+ halfword chr = rule_strut_character(rule);
if (fnt > 0 && chr && tex_char_exists(fnt, chr)) {
if (ht == null_flag) {
ht = tex_math_font_char_ht(fnt, chr, style);
@@ -192,7 +220,7 @@ void tex_aux_check_text_strut_rule(halfword rule, halfword style)
scaled dp = rule_depth(rule);
if (ht == null_flag || dp == null_flag) {
halfword fnt = tex_get_rule_font(rule, style);
- halfword chr = rule_character(rule);
+ halfword chr = rule_strut_character(rule);
if (fnt > 0 && chr && tex_char_exists(fnt, chr)) {
scaledwhd whd = tex_char_whd_from_font(fnt, chr);
if (ht == null_flag) {
@@ -208,47 +236,81 @@ void tex_aux_check_text_strut_rule(halfword rule, halfword style)
halfword tex_get_rule_font(halfword n, halfword style)
{
- halfword fnt = rule_font(n);
- if (fnt >= rule_font_fam_offset) {
- halfword fam = fnt - rule_font_fam_offset;
- if (fam_par_in_range(fam)) {
- fnt = tex_fam_fnt(fam, tex_size_of_style(style));
+ if (node_subtype(n) == virtual_rule_subtype) {
+ halfword fnt = rule_strut_font(n);
+ if (fnt >= rule_font_fam_offset) {
+ halfword fam = fnt - rule_font_fam_offset;
+ if (fam_par_in_range(fam)) {
+ fnt = tex_fam_fnt(fam, tex_size_of_style(style));
+ }
+ }
+ if (fnt < 0 || fnt >= max_n_of_fonts) {
+ return null_font;
+ } else {
+ return fnt;
}
- }
- if (fnt < 0 || fnt >= max_n_of_fonts) {
+ } else {
return null_font;
- } else {
- return fnt;
}
}
halfword tex_get_rule_family(halfword n)
{
- halfword fnt = rule_font(n);
- if (fnt >= rule_font_fam_offset) {
- halfword fam = fnt - rule_font_fam_offset;
- if (fam_par_in_range(fam)) {
- return fam;
+ if (node_subtype(n) == virtual_rule_subtype) {
+ halfword fnt = rule_strut_font(n);
+ if (fnt >= rule_font_fam_offset) {
+ halfword fam = fnt - rule_font_fam_offset;
+ if (fam_par_in_range(fam)) {
+ return fam;
+ }
}
- }
- return 0;
+ }
+ return 0;
}
void tex_set_rule_font(halfword n, halfword fnt)
{
- if (fnt < 0 || fnt >= rule_font_fam_offset) {
- rule_font(n) = 0;
- } else {
- rule_font(n) = fnt;
+ if (node_subtype(n) == virtual_rule_subtype) {
+ if (fnt < 0 || fnt >= rule_font_fam_offset) {
+ rule_strut_font(n) = 0;
+ } else {
+ rule_strut_font(n) = fnt;
+ }
}
}
void tex_set_rule_family(halfword n, halfword fam)
{
- if (fam < 0 || fam >= max_n_of_math_families) {
- rule_font(n) = rule_font_fam_offset;
- } else {
- rule_font(n) = rule_font_fam_offset + fam;
+ if (node_subtype(n) == virtual_rule_subtype) {
+ if (fam < 0 || fam >= max_n_of_math_families) {
+ rule_strut_font(n) = rule_font_fam_offset;
+ } else {
+ rule_strut_font(n) = rule_font_fam_offset + fam;
+ }
+ }
+}
+
+halfword tex_get_rule_left(halfword n)
+{
+ return node_subtype(n) == virtual_rule_subtype ? 0 : rule_left(n);
+}
+
+halfword tex_get_rule_right(halfword n)
+{
+ return node_subtype(n) == virtual_rule_subtype ? 0 : rule_right(n);
+}
+
+void tex_set_rule_left(halfword n, halfword value)
+{
+ if (node_subtype(n) != virtual_rule_subtype) {
+ rule_left(n) = value;
+ }
+}
+
+void tex_set_rule_right(halfword n, halfword value)
+{
+ if (node_subtype(n) != virtual_rule_subtype) {
+ rule_right(n) = value;
}
}
diff --git a/source/luametatex/source/tex/texrules.h b/source/luametatex/source/tex/texrules.h
index 8a01ac847..444c6d645 100644
--- a/source/luametatex/source/tex/texrules.h
+++ b/source/luametatex/source/tex/texrules.h
@@ -24,4 +24,9 @@ extern halfword tex_get_rule_family (halfword n);
extern void tex_set_rule_font (halfword n, halfword fnt);
extern void tex_set_rule_family (halfword n, halfword fam);
+extern halfword tex_get_rule_left (halfword n);
+extern halfword tex_get_rule_right (halfword n);
+extern void tex_set_rule_left (halfword n, halfword value);
+extern void tex_set_rule_right (halfword n, halfword value);
+
# endif
diff --git a/source/luametatex/source/tex/texscanning.c b/source/luametatex/source/tex/texscanning.c
index 675186681..9559a4040 100644
--- a/source/luametatex/source/tex/texscanning.c
+++ b/source/luametatex/source/tex/texscanning.c
@@ -1513,7 +1513,7 @@ static halfword tex_aux_scan_something_internal(halfword cmd, halfword chr, int
case math_parameter_set_atom_rule:
case math_parameter_let_atom_rule:
case math_parameter_copy_atom_rule:
- case math_parameter_let_parent:
+ // case math_parameter_let_parent:
case math_parameter_copy_parent:
case math_parameter_set_defaults:
{
@@ -1521,6 +1521,15 @@ static halfword tex_aux_scan_something_internal(halfword cmd, halfword chr, int
// cur_val_level = int_val_level;
break;
}
+ case math_parameter_let_parent:
+ {
+ halfword mathclass = tex_scan_math_class_number(0);
+ if (valid_math_class_code(mathclass)) {
+ cur_val = tex_math_has_class_parent(mathclass);
+ cur_val_level = int_val_level;
+ }
+ break;
+ }
case math_parameter_set_pre_penalty:
case math_parameter_set_post_penalty:
case math_parameter_set_display_pre_penalty:
@@ -1893,16 +1902,6 @@ static halfword tex_aux_scan_something_internal(halfword cmd, halfword chr, int
}
break;
/*
- case string_cmd:
- {
- halfword head = str_toks(str_lstring(cs_offset_value + chr), NULL);
- begin_inserted_list(head);
- cur_val = 0;
- cur_val_level = no_val_level;
- break;
- }
- */
- /*
case special_box_cmd:
switch (chr) {
case left_box_code:
@@ -3292,6 +3291,14 @@ halfword tex_the_value_toks(int code, halfword *tail, halfword property) /* mayb
return null;
}
+void tex_detokenize_list(halfword head)
+{
+ int saved_selector;
+ push_selector;
+ tex_show_token_list(head, 0);
+ pop_selector;
+}
+
halfword tex_the_detokenized_toks(halfword *tail)
{
halfword head = tex_scan_general_text(tail);
diff --git a/source/luametatex/source/tex/texscanning.h b/source/luametatex/source/tex/texscanning.h
index 34e118d93..ed11b8f4c 100644
--- a/source/luametatex/source/tex/texscanning.h
+++ b/source/luametatex/source/tex/texscanning.h
@@ -131,7 +131,8 @@ extern halfword tex_scan_bytecode_reference (int optional_equal);
extern halfword tex_the_value_toks (int unit, halfword *tail, halfword property); /* returns head */
extern halfword tex_the_toks (int code, halfword *tail); /* returns head */
-extern halfword tex_the_detokenized_toks (halfword *head);
+extern halfword tex_the_detokenized_toks (halfword *tail);
+extern void tex_detokenize_list (halfword head);
extern strnumber tex_the_scanned_result (void);
extern void tex_set_font_dimen (void);
diff --git a/source/luametatex/source/tex/textoken.c b/source/luametatex/source/tex/textoken.c
index e3aa90c0f..7580d72d3 100644
--- a/source/luametatex/source/tex/textoken.c
+++ b/source/luametatex/source/tex/textoken.c
@@ -2942,6 +2942,7 @@ void tex_run_convert_tokens(halfword code)
break;
*/
case detokenized_code:
+ /*tex Sort of like |\meaningless| but without the explanatory text. */
{
int saved_selector;
int saved_scanner_status = lmt_input_state.scanner_status;
@@ -2956,6 +2957,60 @@ void tex_run_convert_tokens(halfword code)
pop_selector;
break;
}
+ case detokened_code:
+ /*tex Takes a control sequence or token list. Probably a bad name but so be it. */
+ {
+ int saved_selector;
+ int saved_scanner_status = lmt_input_state.scanner_status;
+ halfword list = null;
+ lmt_input_state.scanner_status = scanner_is_normal;
+ tex_get_token();
+ lmt_input_state.scanner_status = saved_scanner_status;
+ switch (cur_cmd) {
+ case call_cmd:
+ case protected_call_cmd:
+ case semi_protected_call_cmd:
+ case tolerant_call_cmd:
+ case tolerant_protected_call_cmd:
+ case tolerant_semi_protected_call_cmd:
+ if (cur_chr) {
+ /* We only serialize macros with no arguments. */
+ list = token_link(cur_chr);
+ break;
+ } else {
+ goto WHATEVER;
+ }
+ case internal_toks_cmd:
+ case register_toks_cmd:
+ list = token_link(eq_value(cur_chr));
+ break;
+ case register_cmd:
+ if (cur_chr == tok_val_level) {
+ halfword n = tex_scan_toks_register_number();
+ list = token_link(toks_register(n));
+ break;
+ } else {
+ goto WHATEVER;
+ }
+ break;
+ default:
+ WHATEVER:
+ {
+ halfword t = tex_get_available_token(cur_tok);
+ push_selector;
+ tex_show_token_list(t, 0);
+ pop_selector;
+ tex_put_available_token(t);
+ }
+ break;
+ }
+ if (list) {
+ push_selector;
+ tex_show_token_list(list, 2);
+ pop_selector;
+ }
+ break;
+ }
case roman_numeral_code:
{
int saved_selector;
@@ -3304,7 +3359,7 @@ char *tex_tokenlist_to_tstring(int pp, int inhibit_par, int *siz, int skippreamb
} else {
int infop = token_info(p);
if (infop < 0) {
- /* unlikely, will go after checking */
+ /*tex Unlikely, will go after checking (maybe a \LUA\ user messed up). */
tex_aux_append_str_to_buffer(error_string_bad(32));
} else if (infop < cs_token_flag) {
/*tex We nearly always end up here because otherwise we have an error. */
@@ -3370,7 +3425,7 @@ char *tex_tokenlist_to_tstring(int pp, int inhibit_par, int *siz, int skippreamb
}
break;
case end_match_cmd:
- if (skippreamble ==2) {
+ if (skippreamble == 2) {
goto EXIT;
} else if (chr == 0) {
if (! skip) {
@@ -3380,15 +3435,6 @@ char *tex_tokenlist_to_tstring(int pp, int inhibit_par, int *siz, int skippreamb
skip = 0 ;
}
break;
- /*
- case string_cmd:
- c = c + cs_offset_value;
- do_make_room((int) str_length(c));
- for (int i = 0; i < str_length(c); i++) {
- token_state.buffer[token_state.bufloc++] = str_string(c)[i];
- }
- break;
- */
case end_paragraph_cmd:
if (! inhibit_par && (auto_paragraph_mode(auto_paragraph_text))) {
tex_aux_append_esc_to_buffer("par");
diff --git a/source/luametatex/source/tex/textypes.h b/source/luametatex/source/tex/textypes.h
index d2bb77972..1e83a975f 100644
--- a/source/luametatex/source/tex/textypes.h
+++ b/source/luametatex/source/tex/textypes.h
@@ -170,6 +170,7 @@ extern halfword tex_badness(
# define semi_loose_criterium 12 /* same as |decent_criterium| */
# define decent_criterium 12
# define semi_tight_criterium 12 /* same as |decent_criterium| */
+# define max_calculated_badness 8189
# define default_rule 26214 /*tex 0.4pt */
# define ignore_depth -65536000 /*tex The magic dimension value to mean \quote {ignore me}: -1000pt */
diff --git a/source/luametatex/source/utilities/auxposit.h b/source/luametatex/source/utilities/auxposit.h
index 6abeae2c9..3975bafb7 100644
--- a/source/luametatex/source/utilities/auxposit.h
+++ b/source/luametatex/source/utilities/auxposit.h
@@ -47,6 +47,11 @@ typedef posit32_t *posit;
# define posit_is_NaR isNaRP32UI
# define posit_eq_zero(a) (a.v == 0)
+# define posit_le_zero(a) (a.v <= 0)
+# define posit_lt_zero(a) (a.v < 0)
+# define posit_gt_zero(a) (a.v > 0)
+# define posit_ge_zero(a) (a.v >= 0)
+# define posit_ne_zero(a) (a.v != 0)
inline static posit_t posit_neg(posit_t a) { posit_t p ; p.v = -a.v & 0xFFFFFFFF; return p; }
inline static posit_t posit_abs(posit_t a) { posit_t p ; int mask = a.v >> 31; p.v = ((a.v + mask) ^ mask) & 0xFFFFFFFF; return p; }
@@ -119,7 +124,12 @@ typedef posit32_t tex_posit;
# define tex_posit_is_NaR(p) posit_is_NaR((tex_posit) { .v = (uint32_t) p })
-# define tex_posit_eq_zero(p) posit_eq_zero((tex_posit) { .v = (uint32_t) p })
+# define tex_posit_eq_zero(p) posit_eq_zero((tex_posit) { .v = (uint32_t) p })
+# define tex_posit_le_zero(p) posit_le_zero((tex_posit) { .v = (uint32_t) p })
+# define tex_posit_lt_zero(p) posit_lt_zero((tex_posit) { .v = (uint32_t) p })
+# define tex_posit_gt_zero(p) posit_gt_zero((tex_posit) { .v = (uint32_t) p })
+# define tex_posit_ge_zero(p) posit_ge_zero((tex_posit) { .v = (uint32_t) p })
+# define tex_posit_ne_zero(p) posit_ne_zero((tex_posit) { .v = (uint32_t) p })
inline static halfword tex_posit_neg(halfword a)
{