summaryrefslogtreecommitdiff
path: root/context/data/textadept
diff options
context:
space:
mode:
Diffstat (limited to 'context/data/textadept')
-rw-r--r--context/data/textadept/context/data/scite-context-data-bidi.lua10357
-rw-r--r--context/data/textadept/context/data/scite-context-data-context.lua4
-rw-r--r--context/data/textadept/context/data/scite-context-data-interfaces.lua4
-rw-r--r--context/data/textadept/context/data/scite-context-data-metafun.lua4
-rw-r--r--context/data/textadept/context/data/scite-context-data-metapost.lua9
-rw-r--r--context/data/textadept/context/data/scite-context-data-tex.lua9
-rw-r--r--context/data/textadept/context/init.lua147
-rw-r--r--context/data/textadept/context/lexers/lexer.lua2686
-rw-r--r--context/data/textadept/context/lexers/lexer.rme1
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer-bibtex.lua195
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer-bidi.lua598
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer-bnf.lua99
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer-cld.lua23
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer-cpp-web.lua23
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer-cpp.lua199
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer-dummy.lua35
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer-json.lua101
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer-lua-longstring.lua31
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer-lua.lua396
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer-mps.lua189
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer-pdf-object.lua136
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer-pdf-xref.lua43
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer-pdf.lua218
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer-sas.lua102
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer-sql.lua238
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer-tex-web.lua23
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer-tex.lua588
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer-txt.lua80
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer-web-snippets.lua132
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer-web.lua67
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer-xml-cdata.lua33
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer-xml-comment.lua33
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer-xml-script.lua33
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer-xml.lua350
-rw-r--r--context/data/textadept/context/lexers/scite-context-lexer.lua2686
-rw-r--r--context/data/textadept/context/lexers/text.lua35
-rw-r--r--context/data/textadept/context/modules/textadept-context-files.lua826
-rw-r--r--context/data/textadept/context/modules/textadept-context-runner.lua1100
-rw-r--r--context/data/textadept/context/modules/textadept-context-settings.lua152
-rw-r--r--context/data/textadept/context/modules/textadept-context-types.lua175
-rw-r--r--context/data/textadept/context/textadept-context.cmd56
-rw-r--r--context/data/textadept/context/textadept-context.sh12
-rw-r--r--context/data/textadept/context/themes/scite-context-theme.lua159
43 files changed, 0 insertions, 22387 deletions
diff --git a/context/data/textadept/context/data/scite-context-data-bidi.lua b/context/data/textadept/context/data/scite-context-data-bidi.lua
deleted file mode 100644
index 4221dee89..000000000
--- a/context/data/textadept/context/data/scite-context-data-bidi.lua
+++ /dev/null
@@ -1,10357 +0,0 @@
-return {
- ["comment"]="generated by: mtxrun -- script interface.lua --bidi",
- ["directions"]={
- [0]="bn",
- "bn",
- "bn",
- "bn",
- "bn",
- "bn",
- "bn",
- "bn",
- "bn",
- "s",
- "b",
- "s",
- "ws",
- "b",
- "bn",
- "bn",
- "bn",
- "bn",
- "bn",
- "bn",
- "bn",
- "bn",
- "bn",
- "bn",
- "bn",
- "bn",
- "bn",
- "bn",
- "b",
- "b",
- "b",
- "s",
- "ws",
- "on",
- "on",
- "et",
- "et",
- "et",
- "on",
- "on",
- "on",
- "on",
- "on",
- "es",
- "cs",
- "es",
- "cs",
- "cs",
- "en",
- "en",
- "en",
- "en",
- "en",
- "en",
- "en",
- "en",
- "en",
- "en",
- "cs",
- "on",
- "on",
- "on",
- "on",
- "on",
- "on",
- [91]="on",
- [92]="on",
- [93]="on",
- [94]="on",
- [95]="on",
- [96]="on",
- [123]="on",
- [124]="on",
- [125]="on",
- [126]="on",
- [127]="bn",
- [128]="bn",
- [129]="bn",
- [130]="bn",
- [131]="bn",
- [132]="bn",
- [133]="b",
- [134]="bn",
- [135]="bn",
- [136]="bn",
- [137]="bn",
- [138]="bn",
- [139]="bn",
- [140]="bn",
- [141]="bn",
- [142]="bn",
- [143]="bn",
- [144]="bn",
- [145]="bn",
- [146]="bn",
- [147]="bn",
- [148]="bn",
- [149]="bn",
- [150]="bn",
- [151]="bn",
- [152]="bn",
- [153]="bn",
- [154]="bn",
- [155]="bn",
- [156]="bn",
- [157]="bn",
- [158]="bn",
- [159]="bn",
- [160]="cs",
- [161]="on",
- [162]="et",
- [163]="et",
- [164]="et",
- [165]="et",
- [166]="on",
- [167]="on",
- [168]="on",
- [169]="on",
- [171]="on",
- [172]="on",
- [173]="bn",
- [174]="on",
- [175]="on",
- [176]="et",
- [177]="et",
- [178]="en",
- [179]="en",
- [180]="on",
- [182]="on",
- [183]="on",
- [184]="on",
- [185]="en",
- [187]="on",
- [188]="on",
- [189]="on",
- [190]="on",
- [191]="on",
- [215]="on",
- [247]="on",
- [697]="on",
- [698]="on",
- [706]="on",
- [707]="on",
- [708]="on",
- [709]="on",
- [710]="on",
- [711]="on",
- [712]="on",
- [713]="on",
- [714]="on",
- [715]="on",
- [716]="on",
- [717]="on",
- [718]="on",
- [719]="on",
- [722]="on",
- [723]="on",
- [724]="on",
- [725]="on",
- [726]="on",
- [727]="on",
- [728]="on",
- [729]="on",
- [730]="on",
- [731]="on",
- [732]="on",
- [733]="on",
- [734]="on",
- [735]="on",
- [741]="on",
- [742]="on",
- [743]="on",
- [744]="on",
- [745]="on",
- [746]="on",
- [747]="on",
- [748]="on",
- [749]="on",
- [751]="on",
- [752]="on",
- [753]="on",
- [754]="on",
- [755]="on",
- [756]="on",
- [757]="on",
- [758]="on",
- [759]="on",
- [760]="on",
- [761]="on",
- [762]="on",
- [763]="on",
- [764]="on",
- [765]="on",
- [766]="on",
- [767]="on",
- [768]="nsm",
- [769]="nsm",
- [770]="nsm",
- [771]="nsm",
- [772]="nsm",
- [773]="nsm",
- [774]="nsm",
- [775]="nsm",
- [776]="nsm",
- [777]="nsm",
- [778]="nsm",
- [779]="nsm",
- [780]="nsm",
- [781]="nsm",
- [782]="nsm",
- [783]="nsm",
- [784]="nsm",
- [785]="nsm",
- [786]="nsm",
- [787]="nsm",
- [788]="nsm",
- [789]="nsm",
- [790]="nsm",
- [791]="nsm",
- [792]="nsm",
- [793]="nsm",
- [794]="nsm",
- [795]="nsm",
- [796]="nsm",
- [797]="nsm",
- [798]="nsm",
- [799]="nsm",
- [800]="nsm",
- [801]="nsm",
- [802]="nsm",
- [803]="nsm",
- [804]="nsm",
- [805]="nsm",
- [806]="nsm",
- [807]="nsm",
- [808]="nsm",
- [809]="nsm",
- [810]="nsm",
- [811]="nsm",
- [812]="nsm",
- [813]="nsm",
- [814]="nsm",
- [815]="nsm",
- [816]="nsm",
- [817]="nsm",
- [818]="nsm",
- [819]="nsm",
- [820]="nsm",
- [821]="nsm",
- [822]="nsm",
- [823]="nsm",
- [824]="nsm",
- [825]="nsm",
- [826]="nsm",
- [827]="nsm",
- [828]="nsm",
- [829]="nsm",
- [830]="nsm",
- [831]="nsm",
- [832]="nsm",
- [833]="nsm",
- [834]="nsm",
- [835]="nsm",
- [836]="nsm",
- [837]="nsm",
- [838]="nsm",
- [839]="nsm",
- [840]="nsm",
- [841]="nsm",
- [842]="nsm",
- [843]="nsm",
- [844]="nsm",
- [845]="nsm",
- [846]="nsm",
- [847]="nsm",
- [848]="nsm",
- [849]="nsm",
- [850]="nsm",
- [851]="nsm",
- [852]="nsm",
- [853]="nsm",
- [854]="nsm",
- [855]="nsm",
- [856]="nsm",
- [857]="nsm",
- [858]="nsm",
- [859]="nsm",
- [860]="nsm",
- [861]="nsm",
- [862]="nsm",
- [863]="nsm",
- [864]="nsm",
- [865]="nsm",
- [866]="nsm",
- [867]="nsm",
- [868]="nsm",
- [869]="nsm",
- [870]="nsm",
- [871]="nsm",
- [872]="nsm",
- [873]="nsm",
- [874]="nsm",
- [875]="nsm",
- [876]="nsm",
- [877]="nsm",
- [878]="nsm",
- [879]="nsm",
- [884]="on",
- [885]="on",
- [894]="on",
- [900]="on",
- [901]="on",
- [903]="on",
- [1014]="on",
- [1155]="nsm",
- [1156]="nsm",
- [1157]="nsm",
- [1158]="nsm",
- [1159]="nsm",
- [1160]="nsm",
- [1161]="nsm",
- [1418]="on",
- [1421]="on",
- [1422]="on",
- [1423]="et",
- [1425]="nsm",
- [1426]="nsm",
- [1427]="nsm",
- [1428]="nsm",
- [1429]="nsm",
- [1430]="nsm",
- [1431]="nsm",
- [1432]="nsm",
- [1433]="nsm",
- [1434]="nsm",
- [1435]="nsm",
- [1436]="nsm",
- [1437]="nsm",
- [1438]="nsm",
- [1439]="nsm",
- [1440]="nsm",
- [1441]="nsm",
- [1442]="nsm",
- [1443]="nsm",
- [1444]="nsm",
- [1445]="nsm",
- [1446]="nsm",
- [1447]="nsm",
- [1448]="nsm",
- [1449]="nsm",
- [1450]="nsm",
- [1451]="nsm",
- [1452]="nsm",
- [1453]="nsm",
- [1454]="nsm",
- [1455]="nsm",
- [1456]="nsm",
- [1457]="nsm",
- [1458]="nsm",
- [1459]="nsm",
- [1460]="nsm",
- [1461]="nsm",
- [1462]="nsm",
- [1463]="nsm",
- [1464]="nsm",
- [1465]="nsm",
- [1466]="nsm",
- [1467]="nsm",
- [1468]="nsm",
- [1469]="nsm",
- [1470]="r",
- [1471]="nsm",
- [1472]="r",
- [1473]="nsm",
- [1474]="nsm",
- [1475]="r",
- [1476]="nsm",
- [1477]="nsm",
- [1478]="r",
- [1479]="nsm",
- [1488]="r",
- [1489]="r",
- [1490]="r",
- [1491]="r",
- [1492]="r",
- [1493]="r",
- [1494]="r",
- [1495]="r",
- [1496]="r",
- [1497]="r",
- [1498]="r",
- [1499]="r",
- [1500]="r",
- [1501]="r",
- [1502]="r",
- [1503]="r",
- [1504]="r",
- [1505]="r",
- [1506]="r",
- [1507]="r",
- [1508]="r",
- [1509]="r",
- [1510]="r",
- [1511]="r",
- [1512]="r",
- [1513]="r",
- [1514]="r",
- [1520]="r",
- [1521]="r",
- [1522]="r",
- [1523]="r",
- [1524]="r",
- [1536]="an",
- [1537]="an",
- [1538]="an",
- [1539]="an",
- [1540]="an",
- [1541]="an",
- [1542]="on",
- [1543]="on",
- [1544]="al",
- [1545]="et",
- [1546]="et",
- [1547]="al",
- [1548]="cs",
- [1549]="al",
- [1550]="on",
- [1551]="on",
- [1552]="nsm",
- [1553]="nsm",
- [1554]="nsm",
- [1555]="nsm",
- [1556]="nsm",
- [1557]="nsm",
- [1558]="nsm",
- [1559]="nsm",
- [1560]="nsm",
- [1561]="nsm",
- [1562]="nsm",
- [1563]="al",
- [1564]="al",
- [1566]="al",
- [1567]="al",
- [1568]="al",
- [1569]="al",
- [1570]="al",
- [1571]="al",
- [1572]="al",
- [1573]="al",
- [1574]="al",
- [1575]="al",
- [1576]="al",
- [1577]="al",
- [1578]="al",
- [1579]="al",
- [1580]="al",
- [1581]="al",
- [1582]="al",
- [1583]="al",
- [1584]="al",
- [1585]="al",
- [1586]="al",
- [1587]="al",
- [1588]="al",
- [1589]="al",
- [1590]="al",
- [1591]="al",
- [1592]="al",
- [1593]="al",
- [1594]="al",
- [1595]="al",
- [1596]="al",
- [1597]="al",
- [1598]="al",
- [1599]="al",
- [1600]="al",
- [1601]="al",
- [1602]="al",
- [1603]="al",
- [1604]="al",
- [1605]="al",
- [1606]="al",
- [1607]="al",
- [1608]="al",
- [1609]="al",
- [1610]="al",
- [1611]="nsm",
- [1612]="nsm",
- [1613]="nsm",
- [1614]="nsm",
- [1615]="nsm",
- [1616]="nsm",
- [1617]="nsm",
- [1618]="nsm",
- [1619]="nsm",
- [1620]="nsm",
- [1621]="nsm",
- [1622]="nsm",
- [1623]="nsm",
- [1624]="nsm",
- [1625]="nsm",
- [1626]="nsm",
- [1627]="nsm",
- [1628]="nsm",
- [1629]="nsm",
- [1630]="nsm",
- [1631]="nsm",
- [1632]="an",
- [1633]="an",
- [1634]="an",
- [1635]="an",
- [1636]="an",
- [1637]="an",
- [1638]="an",
- [1639]="an",
- [1640]="an",
- [1641]="an",
- [1642]="et",
- [1643]="an",
- [1644]="an",
- [1645]="al",
- [1646]="al",
- [1647]="al",
- [1648]="nsm",
- [1649]="al",
- [1650]="al",
- [1651]="al",
- [1652]="al",
- [1653]="al",
- [1654]="al",
- [1655]="al",
- [1656]="al",
- [1657]="al",
- [1658]="al",
- [1659]="al",
- [1660]="al",
- [1661]="al",
- [1662]="al",
- [1663]="al",
- [1664]="al",
- [1665]="al",
- [1666]="al",
- [1667]="al",
- [1668]="al",
- [1669]="al",
- [1670]="al",
- [1671]="al",
- [1672]="al",
- [1673]="al",
- [1674]="al",
- [1675]="al",
- [1676]="al",
- [1677]="al",
- [1678]="al",
- [1679]="al",
- [1680]="al",
- [1681]="al",
- [1682]="al",
- [1683]="al",
- [1684]="al",
- [1685]="al",
- [1686]="al",
- [1687]="al",
- [1688]="al",
- [1689]="al",
- [1690]="al",
- [1691]="al",
- [1692]="al",
- [1693]="al",
- [1694]="al",
- [1695]="al",
- [1696]="al",
- [1697]="al",
- [1698]="al",
- [1699]="al",
- [1700]="al",
- [1701]="al",
- [1702]="al",
- [1703]="al",
- [1704]="al",
- [1705]="al",
- [1706]="al",
- [1707]="al",
- [1708]="al",
- [1709]="al",
- [1710]="al",
- [1711]="al",
- [1712]="al",
- [1713]="al",
- [1714]="al",
- [1715]="al",
- [1716]="al",
- [1717]="al",
- [1718]="al",
- [1719]="al",
- [1720]="al",
- [1721]="al",
- [1722]="al",
- [1723]="al",
- [1724]="al",
- [1725]="al",
- [1726]="al",
- [1727]="al",
- [1728]="al",
- [1729]="al",
- [1730]="al",
- [1731]="al",
- [1732]="al",
- [1733]="al",
- [1734]="al",
- [1735]="al",
- [1736]="al",
- [1737]="al",
- [1738]="al",
- [1739]="al",
- [1740]="al",
- [1741]="al",
- [1742]="al",
- [1743]="al",
- [1744]="al",
- [1745]="al",
- [1746]="al",
- [1747]="al",
- [1748]="al",
- [1749]="al",
- [1750]="nsm",
- [1751]="nsm",
- [1752]="nsm",
- [1753]="nsm",
- [1754]="nsm",
- [1755]="nsm",
- [1756]="nsm",
- [1757]="an",
- [1758]="on",
- [1759]="nsm",
- [1760]="nsm",
- [1761]="nsm",
- [1762]="nsm",
- [1763]="nsm",
- [1764]="nsm",
- [1765]="al",
- [1766]="al",
- [1767]="nsm",
- [1768]="nsm",
- [1769]="on",
- [1770]="nsm",
- [1771]="nsm",
- [1772]="nsm",
- [1773]="nsm",
- [1774]="al",
- [1775]="al",
- [1776]="en",
- [1777]="en",
- [1778]="en",
- [1779]="en",
- [1780]="en",
- [1781]="en",
- [1782]="en",
- [1783]="en",
- [1784]="en",
- [1785]="en",
- [1786]="al",
- [1787]="al",
- [1788]="al",
- [1789]="al",
- [1790]="al",
- [1791]="al",
- [1792]="al",
- [1793]="al",
- [1794]="al",
- [1795]="al",
- [1796]="al",
- [1797]="al",
- [1798]="al",
- [1799]="al",
- [1800]="al",
- [1801]="al",
- [1802]="al",
- [1803]="al",
- [1804]="al",
- [1805]="al",
- [1807]="al",
- [1808]="al",
- [1809]="nsm",
- [1810]="al",
- [1811]="al",
- [1812]="al",
- [1813]="al",
- [1814]="al",
- [1815]="al",
- [1816]="al",
- [1817]="al",
- [1818]="al",
- [1819]="al",
- [1820]="al",
- [1821]="al",
- [1822]="al",
- [1823]="al",
- [1824]="al",
- [1825]="al",
- [1826]="al",
- [1827]="al",
- [1828]="al",
- [1829]="al",
- [1830]="al",
- [1831]="al",
- [1832]="al",
- [1833]="al",
- [1834]="al",
- [1835]="al",
- [1836]="al",
- [1837]="al",
- [1838]="al",
- [1839]="al",
- [1840]="nsm",
- [1841]="nsm",
- [1842]="nsm",
- [1843]="nsm",
- [1844]="nsm",
- [1845]="nsm",
- [1846]="nsm",
- [1847]="nsm",
- [1848]="nsm",
- [1849]="nsm",
- [1850]="nsm",
- [1851]="nsm",
- [1852]="nsm",
- [1853]="nsm",
- [1854]="nsm",
- [1855]="nsm",
- [1856]="nsm",
- [1857]="nsm",
- [1858]="nsm",
- [1859]="nsm",
- [1860]="nsm",
- [1861]="nsm",
- [1862]="nsm",
- [1863]="nsm",
- [1864]="nsm",
- [1865]="nsm",
- [1866]="nsm",
- [1869]="al",
- [1870]="al",
- [1871]="al",
- [1872]="al",
- [1873]="al",
- [1874]="al",
- [1875]="al",
- [1876]="al",
- [1877]="al",
- [1878]="al",
- [1879]="al",
- [1880]="al",
- [1881]="al",
- [1882]="al",
- [1883]="al",
- [1884]="al",
- [1885]="al",
- [1886]="al",
- [1887]="al",
- [1888]="al",
- [1889]="al",
- [1890]="al",
- [1891]="al",
- [1892]="al",
- [1893]="al",
- [1894]="al",
- [1895]="al",
- [1896]="al",
- [1897]="al",
- [1898]="al",
- [1899]="al",
- [1900]="al",
- [1901]="al",
- [1902]="al",
- [1903]="al",
- [1904]="al",
- [1905]="al",
- [1906]="al",
- [1907]="al",
- [1908]="al",
- [1909]="al",
- [1910]="al",
- [1911]="al",
- [1912]="al",
- [1913]="al",
- [1914]="al",
- [1915]="al",
- [1916]="al",
- [1917]="al",
- [1918]="al",
- [1919]="al",
- [1920]="al",
- [1921]="al",
- [1922]="al",
- [1923]="al",
- [1924]="al",
- [1925]="al",
- [1926]="al",
- [1927]="al",
- [1928]="al",
- [1929]="al",
- [1930]="al",
- [1931]="al",
- [1932]="al",
- [1933]="al",
- [1934]="al",
- [1935]="al",
- [1936]="al",
- [1937]="al",
- [1938]="al",
- [1939]="al",
- [1940]="al",
- [1941]="al",
- [1942]="al",
- [1943]="al",
- [1944]="al",
- [1945]="al",
- [1946]="al",
- [1947]="al",
- [1948]="al",
- [1949]="al",
- [1950]="al",
- [1951]="al",
- [1952]="al",
- [1953]="al",
- [1954]="al",
- [1955]="al",
- [1956]="al",
- [1957]="al",
- [1958]="nsm",
- [1959]="nsm",
- [1960]="nsm",
- [1961]="nsm",
- [1962]="nsm",
- [1963]="nsm",
- [1964]="nsm",
- [1965]="nsm",
- [1966]="nsm",
- [1967]="nsm",
- [1968]="nsm",
- [1969]="al",
- [1984]="r",
- [1985]="r",
- [1986]="r",
- [1987]="r",
- [1988]="r",
- [1989]="r",
- [1990]="r",
- [1991]="r",
- [1992]="r",
- [1993]="r",
- [1994]="r",
- [1995]="r",
- [1996]="r",
- [1997]="r",
- [1998]="r",
- [1999]="r",
- [2000]="r",
- [2001]="r",
- [2002]="r",
- [2003]="r",
- [2004]="r",
- [2005]="r",
- [2006]="r",
- [2007]="r",
- [2008]="r",
- [2009]="r",
- [2010]="r",
- [2011]="r",
- [2012]="r",
- [2013]="r",
- [2014]="r",
- [2015]="r",
- [2016]="r",
- [2017]="r",
- [2018]="r",
- [2019]="r",
- [2020]="r",
- [2021]="r",
- [2022]="r",
- [2023]="r",
- [2024]="r",
- [2025]="r",
- [2026]="r",
- [2027]="nsm",
- [2028]="nsm",
- [2029]="nsm",
- [2030]="nsm",
- [2031]="nsm",
- [2032]="nsm",
- [2033]="nsm",
- [2034]="nsm",
- [2035]="nsm",
- [2036]="r",
- [2037]="r",
- [2038]="on",
- [2039]="on",
- [2040]="on",
- [2041]="on",
- [2042]="r",
- [2048]="r",
- [2049]="r",
- [2050]="r",
- [2051]="r",
- [2052]="r",
- [2053]="r",
- [2054]="r",
- [2055]="r",
- [2056]="r",
- [2057]="r",
- [2058]="r",
- [2059]="r",
- [2060]="r",
- [2061]="r",
- [2062]="r",
- [2063]="r",
- [2064]="r",
- [2065]="r",
- [2066]="r",
- [2067]="r",
- [2068]="r",
- [2069]="r",
- [2070]="nsm",
- [2071]="nsm",
- [2072]="nsm",
- [2073]="nsm",
- [2074]="r",
- [2075]="nsm",
- [2076]="nsm",
- [2077]="nsm",
- [2078]="nsm",
- [2079]="nsm",
- [2080]="nsm",
- [2081]="nsm",
- [2082]="nsm",
- [2083]="nsm",
- [2084]="r",
- [2085]="nsm",
- [2086]="nsm",
- [2087]="nsm",
- [2088]="r",
- [2089]="nsm",
- [2090]="nsm",
- [2091]="nsm",
- [2092]="nsm",
- [2093]="nsm",
- [2096]="r",
- [2097]="r",
- [2098]="r",
- [2099]="r",
- [2100]="r",
- [2101]="r",
- [2102]="r",
- [2103]="r",
- [2104]="r",
- [2105]="r",
- [2106]="r",
- [2107]="r",
- [2108]="r",
- [2109]="r",
- [2110]="r",
- [2112]="r",
- [2113]="r",
- [2114]="r",
- [2115]="r",
- [2116]="r",
- [2117]="r",
- [2118]="r",
- [2119]="r",
- [2120]="r",
- [2121]="r",
- [2122]="r",
- [2123]="r",
- [2124]="r",
- [2125]="r",
- [2126]="r",
- [2127]="r",
- [2128]="r",
- [2129]="r",
- [2130]="r",
- [2131]="r",
- [2132]="r",
- [2133]="r",
- [2134]="r",
- [2135]="r",
- [2136]="r",
- [2137]="nsm",
- [2138]="nsm",
- [2139]="nsm",
- [2142]="r",
- [2144]="al",
- [2145]="al",
- [2146]="al",
- [2147]="al",
- [2148]="al",
- [2149]="al",
- [2150]="al",
- [2151]="al",
- [2152]="al",
- [2153]="al",
- [2154]="al",
- [2208]="al",
- [2209]="al",
- [2210]="al",
- [2211]="al",
- [2212]="al",
- [2213]="al",
- [2214]="al",
- [2215]="al",
- [2216]="al",
- [2217]="al",
- [2218]="al",
- [2219]="al",
- [2220]="al",
- [2221]="al",
- [2222]="al",
- [2223]="al",
- [2224]="al",
- [2225]="al",
- [2226]="al",
- [2227]="al",
- [2228]="al",
- [2230]="al",
- [2231]="al",
- [2232]="al",
- [2233]="al",
- [2234]="al",
- [2235]="al",
- [2236]="al",
- [2237]="al",
- [2260]="nsm",
- [2261]="nsm",
- [2262]="nsm",
- [2263]="nsm",
- [2264]="nsm",
- [2265]="nsm",
- [2266]="nsm",
- [2267]="nsm",
- [2268]="nsm",
- [2269]="nsm",
- [2270]="nsm",
- [2271]="nsm",
- [2272]="nsm",
- [2273]="nsm",
- [2274]="an",
- [2275]="nsm",
- [2276]="nsm",
- [2277]="nsm",
- [2278]="nsm",
- [2279]="nsm",
- [2280]="nsm",
- [2281]="nsm",
- [2282]="nsm",
- [2283]="nsm",
- [2284]="nsm",
- [2285]="nsm",
- [2286]="nsm",
- [2287]="nsm",
- [2288]="nsm",
- [2289]="nsm",
- [2290]="nsm",
- [2291]="nsm",
- [2292]="nsm",
- [2293]="nsm",
- [2294]="nsm",
- [2295]="nsm",
- [2296]="nsm",
- [2297]="nsm",
- [2298]="nsm",
- [2299]="nsm",
- [2300]="nsm",
- [2301]="nsm",
- [2302]="nsm",
- [2303]="nsm",
- [2304]="nsm",
- [2305]="nsm",
- [2306]="nsm",
- [2362]="nsm",
- [2364]="nsm",
- [2369]="nsm",
- [2370]="nsm",
- [2371]="nsm",
- [2372]="nsm",
- [2373]="nsm",
- [2374]="nsm",
- [2375]="nsm",
- [2376]="nsm",
- [2381]="nsm",
- [2385]="nsm",
- [2386]="nsm",
- [2387]="nsm",
- [2388]="nsm",
- [2389]="nsm",
- [2390]="nsm",
- [2391]="nsm",
- [2402]="nsm",
- [2403]="nsm",
- [2433]="nsm",
- [2492]="nsm",
- [2497]="nsm",
- [2498]="nsm",
- [2499]="nsm",
- [2500]="nsm",
- [2509]="nsm",
- [2530]="nsm",
- [2531]="nsm",
- [2546]="et",
- [2547]="et",
- [2555]="et",
- [2561]="nsm",
- [2562]="nsm",
- [2620]="nsm",
- [2625]="nsm",
- [2626]="nsm",
- [2631]="nsm",
- [2632]="nsm",
- [2635]="nsm",
- [2636]="nsm",
- [2637]="nsm",
- [2641]="nsm",
- [2672]="nsm",
- [2673]="nsm",
- [2677]="nsm",
- [2689]="nsm",
- [2690]="nsm",
- [2748]="nsm",
- [2753]="nsm",
- [2754]="nsm",
- [2755]="nsm",
- [2756]="nsm",
- [2757]="nsm",
- [2759]="nsm",
- [2760]="nsm",
- [2765]="nsm",
- [2786]="nsm",
- [2787]="nsm",
- [2801]="et",
- [2810]="nsm",
- [2811]="nsm",
- [2812]="nsm",
- [2813]="nsm",
- [2814]="nsm",
- [2815]="nsm",
- [2817]="nsm",
- [2876]="nsm",
- [2879]="nsm",
- [2881]="nsm",
- [2882]="nsm",
- [2883]="nsm",
- [2884]="nsm",
- [2893]="nsm",
- [2902]="nsm",
- [2914]="nsm",
- [2915]="nsm",
- [2946]="nsm",
- [3008]="nsm",
- [3021]="nsm",
- [3059]="on",
- [3060]="on",
- [3061]="on",
- [3062]="on",
- [3063]="on",
- [3064]="on",
- [3065]="et",
- [3066]="on",
- [3072]="nsm",
- [3134]="nsm",
- [3135]="nsm",
- [3136]="nsm",
- [3142]="nsm",
- [3143]="nsm",
- [3144]="nsm",
- [3146]="nsm",
- [3147]="nsm",
- [3148]="nsm",
- [3149]="nsm",
- [3157]="nsm",
- [3158]="nsm",
- [3170]="nsm",
- [3171]="nsm",
- [3192]="on",
- [3193]="on",
- [3194]="on",
- [3195]="on",
- [3196]="on",
- [3197]="on",
- [3198]="on",
- [3201]="nsm",
- [3260]="nsm",
- [3276]="nsm",
- [3277]="nsm",
- [3298]="nsm",
- [3299]="nsm",
- [3328]="nsm",
- [3329]="nsm",
- [3387]="nsm",
- [3388]="nsm",
- [3393]="nsm",
- [3394]="nsm",
- [3395]="nsm",
- [3396]="nsm",
- [3405]="nsm",
- [3426]="nsm",
- [3427]="nsm",
- [3530]="nsm",
- [3538]="nsm",
- [3539]="nsm",
- [3540]="nsm",
- [3542]="nsm",
- [3633]="nsm",
- [3636]="nsm",
- [3637]="nsm",
- [3638]="nsm",
- [3639]="nsm",
- [3640]="nsm",
- [3641]="nsm",
- [3642]="nsm",
- [3647]="et",
- [3655]="nsm",
- [3656]="nsm",
- [3657]="nsm",
- [3658]="nsm",
- [3659]="nsm",
- [3660]="nsm",
- [3661]="nsm",
- [3662]="nsm",
- [3761]="nsm",
- [3764]="nsm",
- [3765]="nsm",
- [3766]="nsm",
- [3767]="nsm",
- [3768]="nsm",
- [3769]="nsm",
- [3771]="nsm",
- [3772]="nsm",
- [3784]="nsm",
- [3785]="nsm",
- [3786]="nsm",
- [3787]="nsm",
- [3788]="nsm",
- [3789]="nsm",
- [3864]="nsm",
- [3865]="nsm",
- [3893]="nsm",
- [3895]="nsm",
- [3897]="nsm",
- [3898]="on",
- [3899]="on",
- [3900]="on",
- [3901]="on",
- [3953]="nsm",
- [3954]="nsm",
- [3955]="nsm",
- [3956]="nsm",
- [3957]="nsm",
- [3958]="nsm",
- [3959]="nsm",
- [3960]="nsm",
- [3961]="nsm",
- [3962]="nsm",
- [3963]="nsm",
- [3964]="nsm",
- [3965]="nsm",
- [3966]="nsm",
- [3968]="nsm",
- [3969]="nsm",
- [3970]="nsm",
- [3971]="nsm",
- [3972]="nsm",
- [3974]="nsm",
- [3975]="nsm",
- [3981]="nsm",
- [3982]="nsm",
- [3983]="nsm",
- [3984]="nsm",
- [3985]="nsm",
- [3986]="nsm",
- [3987]="nsm",
- [3988]="nsm",
- [3989]="nsm",
- [3990]="nsm",
- [3991]="nsm",
- [3993]="nsm",
- [3994]="nsm",
- [3995]="nsm",
- [3996]="nsm",
- [3997]="nsm",
- [3998]="nsm",
- [3999]="nsm",
- [4000]="nsm",
- [4001]="nsm",
- [4002]="nsm",
- [4003]="nsm",
- [4004]="nsm",
- [4005]="nsm",
- [4006]="nsm",
- [4007]="nsm",
- [4008]="nsm",
- [4009]="nsm",
- [4010]="nsm",
- [4011]="nsm",
- [4012]="nsm",
- [4013]="nsm",
- [4014]="nsm",
- [4015]="nsm",
- [4016]="nsm",
- [4017]="nsm",
- [4018]="nsm",
- [4019]="nsm",
- [4020]="nsm",
- [4021]="nsm",
- [4022]="nsm",
- [4023]="nsm",
- [4024]="nsm",
- [4025]="nsm",
- [4026]="nsm",
- [4027]="nsm",
- [4028]="nsm",
- [4038]="nsm",
- [4141]="nsm",
- [4142]="nsm",
- [4143]="nsm",
- [4144]="nsm",
- [4146]="nsm",
- [4147]="nsm",
- [4148]="nsm",
- [4149]="nsm",
- [4150]="nsm",
- [4151]="nsm",
- [4153]="nsm",
- [4154]="nsm",
- [4157]="nsm",
- [4158]="nsm",
- [4184]="nsm",
- [4185]="nsm",
- [4190]="nsm",
- [4191]="nsm",
- [4192]="nsm",
- [4209]="nsm",
- [4210]="nsm",
- [4211]="nsm",
- [4212]="nsm",
- [4226]="nsm",
- [4229]="nsm",
- [4230]="nsm",
- [4237]="nsm",
- [4253]="nsm",
- [4957]="nsm",
- [4958]="nsm",
- [4959]="nsm",
- [5008]="on",
- [5009]="on",
- [5010]="on",
- [5011]="on",
- [5012]="on",
- [5013]="on",
- [5014]="on",
- [5015]="on",
- [5016]="on",
- [5017]="on",
- [5120]="on",
- [5760]="ws",
- [5787]="on",
- [5788]="on",
- [5906]="nsm",
- [5907]="nsm",
- [5908]="nsm",
- [5938]="nsm",
- [5939]="nsm",
- [5940]="nsm",
- [5970]="nsm",
- [5971]="nsm",
- [6002]="nsm",
- [6003]="nsm",
- [6068]="nsm",
- [6069]="nsm",
- [6071]="nsm",
- [6072]="nsm",
- [6073]="nsm",
- [6074]="nsm",
- [6075]="nsm",
- [6076]="nsm",
- [6077]="nsm",
- [6086]="nsm",
- [6089]="nsm",
- [6090]="nsm",
- [6091]="nsm",
- [6092]="nsm",
- [6093]="nsm",
- [6094]="nsm",
- [6095]="nsm",
- [6096]="nsm",
- [6097]="nsm",
- [6098]="nsm",
- [6099]="nsm",
- [6107]="et",
- [6109]="nsm",
- [6128]="on",
- [6129]="on",
- [6130]="on",
- [6131]="on",
- [6132]="on",
- [6133]="on",
- [6134]="on",
- [6135]="on",
- [6136]="on",
- [6137]="on",
- [6144]="on",
- [6145]="on",
- [6146]="on",
- [6147]="on",
- [6148]="on",
- [6149]="on",
- [6150]="on",
- [6151]="on",
- [6152]="on",
- [6153]="on",
- [6154]="on",
- [6155]="nsm",
- [6156]="nsm",
- [6157]="nsm",
- [6158]="bn",
- [6277]="nsm",
- [6278]="nsm",
- [6313]="nsm",
- [6432]="nsm",
- [6433]="nsm",
- [6434]="nsm",
- [6439]="nsm",
- [6440]="nsm",
- [6450]="nsm",
- [6457]="nsm",
- [6458]="nsm",
- [6459]="nsm",
- [6464]="on",
- [6468]="on",
- [6469]="on",
- [6622]="on",
- [6623]="on",
- [6624]="on",
- [6625]="on",
- [6626]="on",
- [6627]="on",
- [6628]="on",
- [6629]="on",
- [6630]="on",
- [6631]="on",
- [6632]="on",
- [6633]="on",
- [6634]="on",
- [6635]="on",
- [6636]="on",
- [6637]="on",
- [6638]="on",
- [6639]="on",
- [6640]="on",
- [6641]="on",
- [6642]="on",
- [6643]="on",
- [6644]="on",
- [6645]="on",
- [6646]="on",
- [6647]="on",
- [6648]="on",
- [6649]="on",
- [6650]="on",
- [6651]="on",
- [6652]="on",
- [6653]="on",
- [6654]="on",
- [6655]="on",
- [6679]="nsm",
- [6680]="nsm",
- [6683]="nsm",
- [6742]="nsm",
- [6744]="nsm",
- [6745]="nsm",
- [6746]="nsm",
- [6747]="nsm",
- [6748]="nsm",
- [6749]="nsm",
- [6750]="nsm",
- [6752]="nsm",
- [6754]="nsm",
- [6757]="nsm",
- [6758]="nsm",
- [6759]="nsm",
- [6760]="nsm",
- [6761]="nsm",
- [6762]="nsm",
- [6763]="nsm",
- [6764]="nsm",
- [6771]="nsm",
- [6772]="nsm",
- [6773]="nsm",
- [6774]="nsm",
- [6775]="nsm",
- [6776]="nsm",
- [6777]="nsm",
- [6778]="nsm",
- [6779]="nsm",
- [6780]="nsm",
- [6783]="nsm",
- [6832]="nsm",
- [6833]="nsm",
- [6834]="nsm",
- [6835]="nsm",
- [6836]="nsm",
- [6837]="nsm",
- [6838]="nsm",
- [6839]="nsm",
- [6840]="nsm",
- [6841]="nsm",
- [6842]="nsm",
- [6843]="nsm",
- [6844]="nsm",
- [6845]="nsm",
- [6846]="nsm",
- [6912]="nsm",
- [6913]="nsm",
- [6914]="nsm",
- [6915]="nsm",
- [6964]="nsm",
- [6966]="nsm",
- [6967]="nsm",
- [6968]="nsm",
- [6969]="nsm",
- [6970]="nsm",
- [6972]="nsm",
- [6978]="nsm",
- [7019]="nsm",
- [7020]="nsm",
- [7021]="nsm",
- [7022]="nsm",
- [7023]="nsm",
- [7024]="nsm",
- [7025]="nsm",
- [7026]="nsm",
- [7027]="nsm",
- [7040]="nsm",
- [7041]="nsm",
- [7074]="nsm",
- [7075]="nsm",
- [7076]="nsm",
- [7077]="nsm",
- [7080]="nsm",
- [7081]="nsm",
- [7083]="nsm",
- [7084]="nsm",
- [7085]="nsm",
- [7142]="nsm",
- [7144]="nsm",
- [7145]="nsm",
- [7149]="nsm",
- [7151]="nsm",
- [7152]="nsm",
- [7153]="nsm",
- [7212]="nsm",
- [7213]="nsm",
- [7214]="nsm",
- [7215]="nsm",
- [7216]="nsm",
- [7217]="nsm",
- [7218]="nsm",
- [7219]="nsm",
- [7222]="nsm",
- [7223]="nsm",
- [7376]="nsm",
- [7377]="nsm",
- [7378]="nsm",
- [7380]="nsm",
- [7381]="nsm",
- [7382]="nsm",
- [7383]="nsm",
- [7384]="nsm",
- [7385]="nsm",
- [7386]="nsm",
- [7387]="nsm",
- [7388]="nsm",
- [7389]="nsm",
- [7390]="nsm",
- [7391]="nsm",
- [7392]="nsm",
- [7394]="nsm",
- [7395]="nsm",
- [7396]="nsm",
- [7397]="nsm",
- [7398]="nsm",
- [7399]="nsm",
- [7400]="nsm",
- [7405]="nsm",
- [7412]="nsm",
- [7416]="nsm",
- [7417]="nsm",
- [7616]="nsm",
- [7617]="nsm",
- [7618]="nsm",
- [7619]="nsm",
- [7620]="nsm",
- [7621]="nsm",
- [7622]="nsm",
- [7623]="nsm",
- [7624]="nsm",
- [7625]="nsm",
- [7626]="nsm",
- [7627]="nsm",
- [7628]="nsm",
- [7629]="nsm",
- [7630]="nsm",
- [7631]="nsm",
- [7632]="nsm",
- [7633]="nsm",
- [7634]="nsm",
- [7635]="nsm",
- [7636]="nsm",
- [7637]="nsm",
- [7638]="nsm",
- [7639]="nsm",
- [7640]="nsm",
- [7641]="nsm",
- [7642]="nsm",
- [7643]="nsm",
- [7644]="nsm",
- [7645]="nsm",
- [7646]="nsm",
- [7647]="nsm",
- [7648]="nsm",
- [7649]="nsm",
- [7650]="nsm",
- [7651]="nsm",
- [7652]="nsm",
- [7653]="nsm",
- [7654]="nsm",
- [7655]="nsm",
- [7656]="nsm",
- [7657]="nsm",
- [7658]="nsm",
- [7659]="nsm",
- [7660]="nsm",
- [7661]="nsm",
- [7662]="nsm",
- [7663]="nsm",
- [7664]="nsm",
- [7665]="nsm",
- [7666]="nsm",
- [7667]="nsm",
- [7668]="nsm",
- [7669]="nsm",
- [7670]="nsm",
- [7671]="nsm",
- [7672]="nsm",
- [7673]="nsm",
- [7675]="nsm",
- [7676]="nsm",
- [7677]="nsm",
- [7678]="nsm",
- [7679]="nsm",
- [8125]="on",
- [8127]="on",
- [8128]="on",
- [8129]="on",
- [8141]="on",
- [8142]="on",
- [8143]="on",
- [8157]="on",
- [8158]="on",
- [8159]="on",
- [8173]="on",
- [8174]="on",
- [8175]="on",
- [8189]="on",
- [8190]="on",
- [8192]="ws",
- [8193]="ws",
- [8194]="ws",
- [8195]="ws",
- [8196]="ws",
- [8197]="ws",
- [8198]="ws",
- [8199]="ws",
- [8200]="ws",
- [8201]="ws",
- [8202]="ws",
- [8203]="bn",
- [8204]="bn",
- [8205]="bn",
- [8207]="r",
- [8208]="on",
- [8209]="on",
- [8210]="on",
- [8211]="on",
- [8212]="on",
- [8213]="on",
- [8214]="on",
- [8215]="on",
- [8216]="on",
- [8217]="on",
- [8218]="on",
- [8219]="on",
- [8220]="on",
- [8221]="on",
- [8222]="on",
- [8223]="on",
- [8224]="on",
- [8225]="on",
- [8226]="on",
- [8227]="on",
- [8228]="on",
- [8229]="on",
- [8230]="on",
- [8231]="on",
- [8232]="ws",
- [8233]="b",
- [8234]="lre",
- [8235]="rle",
- [8236]="pdf",
- [8237]="lro",
- [8238]="rlo",
- [8239]="cs",
- [8240]="et",
- [8241]="et",
- [8242]="et",
- [8243]="et",
- [8244]="et",
- [8245]="on",
- [8246]="on",
- [8247]="on",
- [8248]="on",
- [8249]="on",
- [8250]="on",
- [8251]="on",
- [8252]="on",
- [8253]="on",
- [8254]="on",
- [8255]="on",
- [8256]="on",
- [8257]="on",
- [8258]="on",
- [8259]="on",
- [8260]="cs",
- [8261]="on",
- [8262]="on",
- [8263]="on",
- [8264]="on",
- [8265]="on",
- [8266]="on",
- [8267]="on",
- [8268]="on",
- [8269]="on",
- [8270]="on",
- [8271]="on",
- [8272]="on",
- [8273]="on",
- [8274]="on",
- [8275]="on",
- [8276]="on",
- [8277]="on",
- [8278]="on",
- [8279]="on",
- [8280]="on",
- [8281]="on",
- [8282]="on",
- [8283]="on",
- [8284]="on",
- [8285]="on",
- [8286]="on",
- [8287]="ws",
- [8288]="bn",
- [8289]="bn",
- [8290]="bn",
- [8291]="bn",
- [8292]="bn",
- [8294]="lri",
- [8295]="rli",
- [8296]="fsi",
- [8297]="pdi",
- [8298]="bn",
- [8299]="bn",
- [8300]="bn",
- [8301]="bn",
- [8302]="bn",
- [8303]="bn",
- [8304]="en",
- [8308]="en",
- [8309]="en",
- [8310]="en",
- [8311]="en",
- [8312]="en",
- [8313]="en",
- [8314]="es",
- [8315]="es",
- [8316]="on",
- [8317]="on",
- [8318]="on",
- [8320]="en",
- [8321]="en",
- [8322]="en",
- [8323]="en",
- [8324]="en",
- [8325]="en",
- [8326]="en",
- [8327]="en",
- [8328]="en",
- [8329]="en",
- [8330]="es",
- [8331]="es",
- [8332]="on",
- [8333]="on",
- [8334]="on",
- [8352]="et",
- [8353]="et",
- [8354]="et",
- [8355]="et",
- [8356]="et",
- [8357]="et",
- [8358]="et",
- [8359]="et",
- [8360]="et",
- [8361]="et",
- [8362]="et",
- [8363]="et",
- [8364]="et",
- [8365]="et",
- [8366]="et",
- [8367]="et",
- [8368]="et",
- [8369]="et",
- [8370]="et",
- [8371]="et",
- [8372]="et",
- [8373]="et",
- [8374]="et",
- [8375]="et",
- [8376]="et",
- [8377]="et",
- [8378]="et",
- [8379]="et",
- [8380]="et",
- [8381]="et",
- [8382]="et",
- [8383]="et",
- [8400]="nsm",
- [8401]="nsm",
- [8402]="nsm",
- [8403]="nsm",
- [8404]="nsm",
- [8405]="nsm",
- [8406]="nsm",
- [8407]="nsm",
- [8408]="nsm",
- [8409]="nsm",
- [8410]="nsm",
- [8411]="nsm",
- [8412]="nsm",
- [8413]="nsm",
- [8414]="nsm",
- [8415]="nsm",
- [8416]="nsm",
- [8417]="nsm",
- [8418]="nsm",
- [8419]="nsm",
- [8420]="nsm",
- [8421]="nsm",
- [8422]="nsm",
- [8423]="nsm",
- [8424]="nsm",
- [8425]="nsm",
- [8426]="nsm",
- [8427]="nsm",
- [8428]="nsm",
- [8429]="nsm",
- [8430]="nsm",
- [8431]="nsm",
- [8432]="nsm",
- [8448]="on",
- [8449]="on",
- [8451]="on",
- [8452]="on",
- [8453]="on",
- [8454]="on",
- [8456]="on",
- [8457]="on",
- [8468]="on",
- [8470]="on",
- [8471]="on",
- [8472]="on",
- [8478]="on",
- [8479]="on",
- [8480]="on",
- [8481]="on",
- [8482]="on",
- [8483]="on",
- [8485]="on",
- [8487]="on",
- [8489]="on",
- [8494]="et",
- [8506]="on",
- [8507]="on",
- [8512]="on",
- [8513]="on",
- [8514]="on",
- [8515]="on",
- [8516]="on",
- [8522]="on",
- [8523]="on",
- [8524]="on",
- [8525]="on",
- [8528]="on",
- [8529]="on",
- [8530]="on",
- [8531]="on",
- [8532]="on",
- [8533]="on",
- [8534]="on",
- [8535]="on",
- [8536]="on",
- [8537]="on",
- [8538]="on",
- [8539]="on",
- [8540]="on",
- [8541]="on",
- [8542]="on",
- [8543]="on",
- [8585]="on",
- [8586]="on",
- [8587]="on",
- [8592]="on",
- [8593]="on",
- [8594]="on",
- [8595]="on",
- [8596]="on",
- [8597]="on",
- [8598]="on",
- [8599]="on",
- [8600]="on",
- [8601]="on",
- [8602]="on",
- [8603]="on",
- [8604]="on",
- [8605]="on",
- [8606]="on",
- [8607]="on",
- [8608]="on",
- [8609]="on",
- [8610]="on",
- [8611]="on",
- [8612]="on",
- [8613]="on",
- [8614]="on",
- [8615]="on",
- [8616]="on",
- [8617]="on",
- [8618]="on",
- [8619]="on",
- [8620]="on",
- [8621]="on",
- [8622]="on",
- [8623]="on",
- [8624]="on",
- [8625]="on",
- [8626]="on",
- [8627]="on",
- [8628]="on",
- [8629]="on",
- [8630]="on",
- [8631]="on",
- [8632]="on",
- [8633]="on",
- [8634]="on",
- [8635]="on",
- [8636]="on",
- [8637]="on",
- [8638]="on",
- [8639]="on",
- [8640]="on",
- [8641]="on",
- [8642]="on",
- [8643]="on",
- [8644]="on",
- [8645]="on",
- [8646]="on",
- [8647]="on",
- [8648]="on",
- [8649]="on",
- [8650]="on",
- [8651]="on",
- [8652]="on",
- [8653]="on",
- [8654]="on",
- [8655]="on",
- [8656]="on",
- [8657]="on",
- [8658]="on",
- [8659]="on",
- [8660]="on",
- [8661]="on",
- [8662]="on",
- [8663]="on",
- [8664]="on",
- [8665]="on",
- [8666]="on",
- [8667]="on",
- [8668]="on",
- [8669]="on",
- [8670]="on",
- [8671]="on",
- [8672]="on",
- [8673]="on",
- [8674]="on",
- [8675]="on",
- [8676]="on",
- [8677]="on",
- [8678]="on",
- [8679]="on",
- [8680]="on",
- [8681]="on",
- [8682]="on",
- [8683]="on",
- [8684]="on",
- [8685]="on",
- [8686]="on",
- [8687]="on",
- [8688]="on",
- [8689]="on",
- [8690]="on",
- [8691]="on",
- [8692]="on",
- [8693]="on",
- [8694]="on",
- [8695]="on",
- [8696]="on",
- [8697]="on",
- [8698]="on",
- [8699]="on",
- [8700]="on",
- [8701]="on",
- [8702]="on",
- [8703]="on",
- [8704]="on",
- [8705]="on",
- [8706]="on",
- [8707]="on",
- [8708]="on",
- [8709]="on",
- [8710]="on",
- [8711]="on",
- [8712]="on",
- [8713]="on",
- [8714]="on",
- [8715]="on",
- [8716]="on",
- [8717]="on",
- [8718]="on",
- [8719]="on",
- [8720]="on",
- [8721]="on",
- [8722]="es",
- [8723]="et",
- [8724]="on",
- [8725]="on",
- [8726]="on",
- [8727]="on",
- [8728]="on",
- [8729]="on",
- [8730]="on",
- [8731]="on",
- [8732]="on",
- [8733]="on",
- [8734]="on",
- [8735]="on",
- [8736]="on",
- [8737]="on",
- [8738]="on",
- [8739]="on",
- [8740]="on",
- [8741]="on",
- [8742]="on",
- [8743]="on",
- [8744]="on",
- [8745]="on",
- [8746]="on",
- [8747]="on",
- [8748]="on",
- [8749]="on",
- [8750]="on",
- [8751]="on",
- [8752]="on",
- [8753]="on",
- [8754]="on",
- [8755]="on",
- [8756]="on",
- [8757]="on",
- [8758]="on",
- [8759]="on",
- [8760]="on",
- [8761]="on",
- [8762]="on",
- [8763]="on",
- [8764]="on",
- [8765]="on",
- [8766]="on",
- [8767]="on",
- [8768]="on",
- [8769]="on",
- [8770]="on",
- [8771]="on",
- [8772]="on",
- [8773]="on",
- [8774]="on",
- [8775]="on",
- [8776]="on",
- [8777]="on",
- [8778]="on",
- [8779]="on",
- [8780]="on",
- [8781]="on",
- [8782]="on",
- [8783]="on",
- [8784]="on",
- [8785]="on",
- [8786]="on",
- [8787]="on",
- [8788]="on",
- [8789]="on",
- [8790]="on",
- [8791]="on",
- [8792]="on",
- [8793]="on",
- [8794]="on",
- [8795]="on",
- [8796]="on",
- [8797]="on",
- [8798]="on",
- [8799]="on",
- [8800]="on",
- [8801]="on",
- [8802]="on",
- [8803]="on",
- [8804]="on",
- [8805]="on",
- [8806]="on",
- [8807]="on",
- [8808]="on",
- [8809]="on",
- [8810]="on",
- [8811]="on",
- [8812]="on",
- [8813]="on",
- [8814]="on",
- [8815]="on",
- [8816]="on",
- [8817]="on",
- [8818]="on",
- [8819]="on",
- [8820]="on",
- [8821]="on",
- [8822]="on",
- [8823]="on",
- [8824]="on",
- [8825]="on",
- [8826]="on",
- [8827]="on",
- [8828]="on",
- [8829]="on",
- [8830]="on",
- [8831]="on",
- [8832]="on",
- [8833]="on",
- [8834]="on",
- [8835]="on",
- [8836]="on",
- [8837]="on",
- [8838]="on",
- [8839]="on",
- [8840]="on",
- [8841]="on",
- [8842]="on",
- [8843]="on",
- [8844]="on",
- [8845]="on",
- [8846]="on",
- [8847]="on",
- [8848]="on",
- [8849]="on",
- [8850]="on",
- [8851]="on",
- [8852]="on",
- [8853]="on",
- [8854]="on",
- [8855]="on",
- [8856]="on",
- [8857]="on",
- [8858]="on",
- [8859]="on",
- [8860]="on",
- [8861]="on",
- [8862]="on",
- [8863]="on",
- [8864]="on",
- [8865]="on",
- [8866]="on",
- [8867]="on",
- [8868]="on",
- [8869]="on",
- [8870]="on",
- [8871]="on",
- [8872]="on",
- [8873]="on",
- [8874]="on",
- [8875]="on",
- [8876]="on",
- [8877]="on",
- [8878]="on",
- [8879]="on",
- [8880]="on",
- [8881]="on",
- [8882]="on",
- [8883]="on",
- [8884]="on",
- [8885]="on",
- [8886]="on",
- [8887]="on",
- [8888]="on",
- [8889]="on",
- [8890]="on",
- [8891]="on",
- [8892]="on",
- [8893]="on",
- [8894]="on",
- [8895]="on",
- [8896]="on",
- [8897]="on",
- [8898]="on",
- [8899]="on",
- [8900]="on",
- [8901]="on",
- [8902]="on",
- [8903]="on",
- [8904]="on",
- [8905]="on",
- [8906]="on",
- [8907]="on",
- [8908]="on",
- [8909]="on",
- [8910]="on",
- [8911]="on",
- [8912]="on",
- [8913]="on",
- [8914]="on",
- [8915]="on",
- [8916]="on",
- [8917]="on",
- [8918]="on",
- [8919]="on",
- [8920]="on",
- [8921]="on",
- [8922]="on",
- [8923]="on",
- [8924]="on",
- [8925]="on",
- [8926]="on",
- [8927]="on",
- [8928]="on",
- [8929]="on",
- [8930]="on",
- [8931]="on",
- [8932]="on",
- [8933]="on",
- [8934]="on",
- [8935]="on",
- [8936]="on",
- [8937]="on",
- [8938]="on",
- [8939]="on",
- [8940]="on",
- [8941]="on",
- [8942]="on",
- [8943]="on",
- [8944]="on",
- [8945]="on",
- [8946]="on",
- [8947]="on",
- [8948]="on",
- [8949]="on",
- [8950]="on",
- [8951]="on",
- [8952]="on",
- [8953]="on",
- [8954]="on",
- [8955]="on",
- [8956]="on",
- [8957]="on",
- [8958]="on",
- [8959]="on",
- [8960]="on",
- [8961]="on",
- [8962]="on",
- [8963]="on",
- [8964]="on",
- [8965]="on",
- [8966]="on",
- [8967]="on",
- [8968]="on",
- [8969]="on",
- [8970]="on",
- [8971]="on",
- [8972]="on",
- [8973]="on",
- [8974]="on",
- [8975]="on",
- [8976]="on",
- [8977]="on",
- [8978]="on",
- [8979]="on",
- [8980]="on",
- [8981]="on",
- [8982]="on",
- [8983]="on",
- [8984]="on",
- [8985]="on",
- [8986]="on",
- [8987]="on",
- [8988]="on",
- [8989]="on",
- [8990]="on",
- [8991]="on",
- [8992]="on",
- [8993]="on",
- [8994]="on",
- [8995]="on",
- [8996]="on",
- [8997]="on",
- [8998]="on",
- [8999]="on",
- [9000]="on",
- [9001]="on",
- [9002]="on",
- [9003]="on",
- [9004]="on",
- [9005]="on",
- [9006]="on",
- [9007]="on",
- [9008]="on",
- [9009]="on",
- [9010]="on",
- [9011]="on",
- [9012]="on",
- [9013]="on",
- [9083]="on",
- [9084]="on",
- [9085]="on",
- [9086]="on",
- [9087]="on",
- [9088]="on",
- [9089]="on",
- [9090]="on",
- [9091]="on",
- [9092]="on",
- [9093]="on",
- [9094]="on",
- [9095]="on",
- [9096]="on",
- [9097]="on",
- [9098]="on",
- [9099]="on",
- [9100]="on",
- [9101]="on",
- [9102]="on",
- [9103]="on",
- [9104]="on",
- [9105]="on",
- [9106]="on",
- [9107]="on",
- [9108]="on",
- [9110]="on",
- [9111]="on",
- [9112]="on",
- [9113]="on",
- [9114]="on",
- [9115]="on",
- [9116]="on",
- [9117]="on",
- [9118]="on",
- [9119]="on",
- [9120]="on",
- [9121]="on",
- [9122]="on",
- [9123]="on",
- [9124]="on",
- [9125]="on",
- [9126]="on",
- [9127]="on",
- [9128]="on",
- [9129]="on",
- [9130]="on",
- [9131]="on",
- [9132]="on",
- [9133]="on",
- [9134]="on",
- [9135]="on",
- [9136]="on",
- [9137]="on",
- [9138]="on",
- [9139]="on",
- [9140]="on",
- [9141]="on",
- [9142]="on",
- [9143]="on",
- [9144]="on",
- [9145]="on",
- [9146]="on",
- [9147]="on",
- [9148]="on",
- [9149]="on",
- [9150]="on",
- [9151]="on",
- [9152]="on",
- [9153]="on",
- [9154]="on",
- [9155]="on",
- [9156]="on",
- [9157]="on",
- [9158]="on",
- [9159]="on",
- [9160]="on",
- [9161]="on",
- [9162]="on",
- [9163]="on",
- [9164]="on",
- [9165]="on",
- [9166]="on",
- [9167]="on",
- [9168]="on",
- [9169]="on",
- [9170]="on",
- [9171]="on",
- [9172]="on",
- [9173]="on",
- [9174]="on",
- [9175]="on",
- [9176]="on",
- [9177]="on",
- [9178]="on",
- [9179]="on",
- [9180]="on",
- [9181]="on",
- [9182]="on",
- [9183]="on",
- [9184]="on",
- [9185]="on",
- [9186]="on",
- [9187]="on",
- [9188]="on",
- [9189]="on",
- [9190]="on",
- [9191]="on",
- [9192]="on",
- [9193]="on",
- [9194]="on",
- [9195]="on",
- [9196]="on",
- [9197]="on",
- [9198]="on",
- [9199]="on",
- [9200]="on",
- [9201]="on",
- [9202]="on",
- [9203]="on",
- [9204]="on",
- [9205]="on",
- [9206]="on",
- [9207]="on",
- [9208]="on",
- [9209]="on",
- [9210]="on",
- [9211]="on",
- [9212]="on",
- [9213]="on",
- [9214]="on",
- [9215]="on",
- [9216]="on",
- [9217]="on",
- [9218]="on",
- [9219]="on",
- [9220]="on",
- [9221]="on",
- [9222]="on",
- [9223]="on",
- [9224]="on",
- [9225]="on",
- [9226]="on",
- [9227]="on",
- [9228]="on",
- [9229]="on",
- [9230]="on",
- [9231]="on",
- [9232]="on",
- [9233]="on",
- [9234]="on",
- [9235]="on",
- [9236]="on",
- [9237]="on",
- [9238]="on",
- [9239]="on",
- [9240]="on",
- [9241]="on",
- [9242]="on",
- [9243]="on",
- [9244]="on",
- [9245]="on",
- [9246]="on",
- [9247]="on",
- [9248]="on",
- [9249]="on",
- [9250]="on",
- [9251]="on",
- [9252]="on",
- [9253]="on",
- [9254]="on",
- [9280]="on",
- [9281]="on",
- [9282]="on",
- [9283]="on",
- [9284]="on",
- [9285]="on",
- [9286]="on",
- [9287]="on",
- [9288]="on",
- [9289]="on",
- [9290]="on",
- [9312]="on",
- [9313]="on",
- [9314]="on",
- [9315]="on",
- [9316]="on",
- [9317]="on",
- [9318]="on",
- [9319]="on",
- [9320]="on",
- [9321]="on",
- [9322]="on",
- [9323]="on",
- [9324]="on",
- [9325]="on",
- [9326]="on",
- [9327]="on",
- [9328]="on",
- [9329]="on",
- [9330]="on",
- [9331]="on",
- [9332]="on",
- [9333]="on",
- [9334]="on",
- [9335]="on",
- [9336]="on",
- [9337]="on",
- [9338]="on",
- [9339]="on",
- [9340]="on",
- [9341]="on",
- [9342]="on",
- [9343]="on",
- [9344]="on",
- [9345]="on",
- [9346]="on",
- [9347]="on",
- [9348]="on",
- [9349]="on",
- [9350]="on",
- [9351]="on",
- [9352]="en",
- [9353]="en",
- [9354]="en",
- [9355]="en",
- [9356]="en",
- [9357]="en",
- [9358]="en",
- [9359]="en",
- [9360]="en",
- [9361]="en",
- [9362]="en",
- [9363]="en",
- [9364]="en",
- [9365]="en",
- [9366]="en",
- [9367]="en",
- [9368]="en",
- [9369]="en",
- [9370]="en",
- [9371]="en",
- [9450]="on",
- [9451]="on",
- [9452]="on",
- [9453]="on",
- [9454]="on",
- [9455]="on",
- [9456]="on",
- [9457]="on",
- [9458]="on",
- [9459]="on",
- [9460]="on",
- [9461]="on",
- [9462]="on",
- [9463]="on",
- [9464]="on",
- [9465]="on",
- [9466]="on",
- [9467]="on",
- [9468]="on",
- [9469]="on",
- [9470]="on",
- [9471]="on",
- [9472]="on",
- [9473]="on",
- [9474]="on",
- [9475]="on",
- [9476]="on",
- [9477]="on",
- [9478]="on",
- [9479]="on",
- [9480]="on",
- [9481]="on",
- [9482]="on",
- [9483]="on",
- [9484]="on",
- [9485]="on",
- [9486]="on",
- [9487]="on",
- [9488]="on",
- [9489]="on",
- [9490]="on",
- [9491]="on",
- [9492]="on",
- [9493]="on",
- [9494]="on",
- [9495]="on",
- [9496]="on",
- [9497]="on",
- [9498]="on",
- [9499]="on",
- [9500]="on",
- [9501]="on",
- [9502]="on",
- [9503]="on",
- [9504]="on",
- [9505]="on",
- [9506]="on",
- [9507]="on",
- [9508]="on",
- [9509]="on",
- [9510]="on",
- [9511]="on",
- [9512]="on",
- [9513]="on",
- [9514]="on",
- [9515]="on",
- [9516]="on",
- [9517]="on",
- [9518]="on",
- [9519]="on",
- [9520]="on",
- [9521]="on",
- [9522]="on",
- [9523]="on",
- [9524]="on",
- [9525]="on",
- [9526]="on",
- [9527]="on",
- [9528]="on",
- [9529]="on",
- [9530]="on",
- [9531]="on",
- [9532]="on",
- [9533]="on",
- [9534]="on",
- [9535]="on",
- [9536]="on",
- [9537]="on",
- [9538]="on",
- [9539]="on",
- [9540]="on",
- [9541]="on",
- [9542]="on",
- [9543]="on",
- [9544]="on",
- [9545]="on",
- [9546]="on",
- [9547]="on",
- [9548]="on",
- [9549]="on",
- [9550]="on",
- [9551]="on",
- [9552]="on",
- [9553]="on",
- [9554]="on",
- [9555]="on",
- [9556]="on",
- [9557]="on",
- [9558]="on",
- [9559]="on",
- [9560]="on",
- [9561]="on",
- [9562]="on",
- [9563]="on",
- [9564]="on",
- [9565]="on",
- [9566]="on",
- [9567]="on",
- [9568]="on",
- [9569]="on",
- [9570]="on",
- [9571]="on",
- [9572]="on",
- [9573]="on",
- [9574]="on",
- [9575]="on",
- [9576]="on",
- [9577]="on",
- [9578]="on",
- [9579]="on",
- [9580]="on",
- [9581]="on",
- [9582]="on",
- [9583]="on",
- [9584]="on",
- [9585]="on",
- [9586]="on",
- [9587]="on",
- [9588]="on",
- [9589]="on",
- [9590]="on",
- [9591]="on",
- [9592]="on",
- [9593]="on",
- [9594]="on",
- [9595]="on",
- [9596]="on",
- [9597]="on",
- [9598]="on",
- [9599]="on",
- [9600]="on",
- [9601]="on",
- [9602]="on",
- [9603]="on",
- [9604]="on",
- [9605]="on",
- [9606]="on",
- [9607]="on",
- [9608]="on",
- [9609]="on",
- [9610]="on",
- [9611]="on",
- [9612]="on",
- [9613]="on",
- [9614]="on",
- [9615]="on",
- [9616]="on",
- [9617]="on",
- [9618]="on",
- [9619]="on",
- [9620]="on",
- [9621]="on",
- [9622]="on",
- [9623]="on",
- [9624]="on",
- [9625]="on",
- [9626]="on",
- [9627]="on",
- [9628]="on",
- [9629]="on",
- [9630]="on",
- [9631]="on",
- [9632]="on",
- [9633]="on",
- [9634]="on",
- [9635]="on",
- [9636]="on",
- [9637]="on",
- [9638]="on",
- [9639]="on",
- [9640]="on",
- [9641]="on",
- [9642]="on",
- [9643]="on",
- [9644]="on",
- [9645]="on",
- [9646]="on",
- [9647]="on",
- [9648]="on",
- [9649]="on",
- [9650]="on",
- [9651]="on",
- [9652]="on",
- [9653]="on",
- [9654]="on",
- [9655]="on",
- [9656]="on",
- [9657]="on",
- [9658]="on",
- [9659]="on",
- [9660]="on",
- [9661]="on",
- [9662]="on",
- [9663]="on",
- [9664]="on",
- [9665]="on",
- [9666]="on",
- [9667]="on",
- [9668]="on",
- [9669]="on",
- [9670]="on",
- [9671]="on",
- [9672]="on",
- [9673]="on",
- [9674]="on",
- [9675]="on",
- [9676]="on",
- [9677]="on",
- [9678]="on",
- [9679]="on",
- [9680]="on",
- [9681]="on",
- [9682]="on",
- [9683]="on",
- [9684]="on",
- [9685]="on",
- [9686]="on",
- [9687]="on",
- [9688]="on",
- [9689]="on",
- [9690]="on",
- [9691]="on",
- [9692]="on",
- [9693]="on",
- [9694]="on",
- [9695]="on",
- [9696]="on",
- [9697]="on",
- [9698]="on",
- [9699]="on",
- [9700]="on",
- [9701]="on",
- [9702]="on",
- [9703]="on",
- [9704]="on",
- [9705]="on",
- [9706]="on",
- [9707]="on",
- [9708]="on",
- [9709]="on",
- [9710]="on",
- [9711]="on",
- [9712]="on",
- [9713]="on",
- [9714]="on",
- [9715]="on",
- [9716]="on",
- [9717]="on",
- [9718]="on",
- [9719]="on",
- [9720]="on",
- [9721]="on",
- [9722]="on",
- [9723]="on",
- [9724]="on",
- [9725]="on",
- [9726]="on",
- [9727]="on",
- [9728]="on",
- [9729]="on",
- [9730]="on",
- [9731]="on",
- [9732]="on",
- [9733]="on",
- [9734]="on",
- [9735]="on",
- [9736]="on",
- [9737]="on",
- [9738]="on",
- [9739]="on",
- [9740]="on",
- [9741]="on",
- [9742]="on",
- [9743]="on",
- [9744]="on",
- [9745]="on",
- [9746]="on",
- [9747]="on",
- [9748]="on",
- [9749]="on",
- [9750]="on",
- [9751]="on",
- [9752]="on",
- [9753]="on",
- [9754]="on",
- [9755]="on",
- [9756]="on",
- [9757]="on",
- [9758]="on",
- [9759]="on",
- [9760]="on",
- [9761]="on",
- [9762]="on",
- [9763]="on",
- [9764]="on",
- [9765]="on",
- [9766]="on",
- [9767]="on",
- [9768]="on",
- [9769]="on",
- [9770]="on",
- [9771]="on",
- [9772]="on",
- [9773]="on",
- [9774]="on",
- [9775]="on",
- [9776]="on",
- [9777]="on",
- [9778]="on",
- [9779]="on",
- [9780]="on",
- [9781]="on",
- [9782]="on",
- [9783]="on",
- [9784]="on",
- [9785]="on",
- [9786]="on",
- [9787]="on",
- [9788]="on",
- [9789]="on",
- [9790]="on",
- [9791]="on",
- [9792]="on",
- [9793]="on",
- [9794]="on",
- [9795]="on",
- [9796]="on",
- [9797]="on",
- [9798]="on",
- [9799]="on",
- [9800]="on",
- [9801]="on",
- [9802]="on",
- [9803]="on",
- [9804]="on",
- [9805]="on",
- [9806]="on",
- [9807]="on",
- [9808]="on",
- [9809]="on",
- [9810]="on",
- [9811]="on",
- [9812]="on",
- [9813]="on",
- [9814]="on",
- [9815]="on",
- [9816]="on",
- [9817]="on",
- [9818]="on",
- [9819]="on",
- [9820]="on",
- [9821]="on",
- [9822]="on",
- [9823]="on",
- [9824]="on",
- [9825]="on",
- [9826]="on",
- [9827]="on",
- [9828]="on",
- [9829]="on",
- [9830]="on",
- [9831]="on",
- [9832]="on",
- [9833]="on",
- [9834]="on",
- [9835]="on",
- [9836]="on",
- [9837]="on",
- [9838]="on",
- [9839]="on",
- [9840]="on",
- [9841]="on",
- [9842]="on",
- [9843]="on",
- [9844]="on",
- [9845]="on",
- [9846]="on",
- [9847]="on",
- [9848]="on",
- [9849]="on",
- [9850]="on",
- [9851]="on",
- [9852]="on",
- [9853]="on",
- [9854]="on",
- [9855]="on",
- [9856]="on",
- [9857]="on",
- [9858]="on",
- [9859]="on",
- [9860]="on",
- [9861]="on",
- [9862]="on",
- [9863]="on",
- [9864]="on",
- [9865]="on",
- [9866]="on",
- [9867]="on",
- [9868]="on",
- [9869]="on",
- [9870]="on",
- [9871]="on",
- [9872]="on",
- [9873]="on",
- [9874]="on",
- [9875]="on",
- [9876]="on",
- [9877]="on",
- [9878]="on",
- [9879]="on",
- [9880]="on",
- [9881]="on",
- [9882]="on",
- [9883]="on",
- [9884]="on",
- [9885]="on",
- [9886]="on",
- [9887]="on",
- [9888]="on",
- [9889]="on",
- [9890]="on",
- [9891]="on",
- [9892]="on",
- [9893]="on",
- [9894]="on",
- [9895]="on",
- [9896]="on",
- [9897]="on",
- [9898]="on",
- [9899]="on",
- [9901]="on",
- [9902]="on",
- [9903]="on",
- [9904]="on",
- [9905]="on",
- [9906]="on",
- [9907]="on",
- [9908]="on",
- [9909]="on",
- [9910]="on",
- [9911]="on",
- [9912]="on",
- [9913]="on",
- [9914]="on",
- [9915]="on",
- [9916]="on",
- [9917]="on",
- [9918]="on",
- [9919]="on",
- [9920]="on",
- [9921]="on",
- [9922]="on",
- [9923]="on",
- [9924]="on",
- [9925]="on",
- [9926]="on",
- [9927]="on",
- [9928]="on",
- [9929]="on",
- [9930]="on",
- [9931]="on",
- [9932]="on",
- [9933]="on",
- [9934]="on",
- [9935]="on",
- [9936]="on",
- [9937]="on",
- [9938]="on",
- [9939]="on",
- [9940]="on",
- [9941]="on",
- [9942]="on",
- [9943]="on",
- [9944]="on",
- [9945]="on",
- [9946]="on",
- [9947]="on",
- [9948]="on",
- [9949]="on",
- [9950]="on",
- [9951]="on",
- [9952]="on",
- [9953]="on",
- [9954]="on",
- [9955]="on",
- [9956]="on",
- [9957]="on",
- [9958]="on",
- [9959]="on",
- [9960]="on",
- [9961]="on",
- [9962]="on",
- [9963]="on",
- [9964]="on",
- [9965]="on",
- [9966]="on",
- [9967]="on",
- [9968]="on",
- [9969]="on",
- [9970]="on",
- [9971]="on",
- [9972]="on",
- [9973]="on",
- [9974]="on",
- [9975]="on",
- [9976]="on",
- [9977]="on",
- [9978]="on",
- [9979]="on",
- [9980]="on",
- [9981]="on",
- [9982]="on",
- [9983]="on",
- [9984]="on",
- [9985]="on",
- [9986]="on",
- [9987]="on",
- [9988]="on",
- [9989]="on",
- [9990]="on",
- [9991]="on",
- [9992]="on",
- [9993]="on",
- [9994]="on",
- [9995]="on",
- [9996]="on",
- [9997]="on",
- [9998]="on",
- [9999]="on",
- [10000]="on",
- [10001]="on",
- [10002]="on",
- [10003]="on",
- [10004]="on",
- [10005]="on",
- [10006]="on",
- [10007]="on",
- [10008]="on",
- [10009]="on",
- [10010]="on",
- [10011]="on",
- [10012]="on",
- [10013]="on",
- [10014]="on",
- [10015]="on",
- [10016]="on",
- [10017]="on",
- [10018]="on",
- [10019]="on",
- [10020]="on",
- [10021]="on",
- [10022]="on",
- [10023]="on",
- [10024]="on",
- [10025]="on",
- [10026]="on",
- [10027]="on",
- [10028]="on",
- [10029]="on",
- [10030]="on",
- [10031]="on",
- [10032]="on",
- [10033]="on",
- [10034]="on",
- [10035]="on",
- [10036]="on",
- [10037]="on",
- [10038]="on",
- [10039]="on",
- [10040]="on",
- [10041]="on",
- [10042]="on",
- [10043]="on",
- [10044]="on",
- [10045]="on",
- [10046]="on",
- [10047]="on",
- [10048]="on",
- [10049]="on",
- [10050]="on",
- [10051]="on",
- [10052]="on",
- [10053]="on",
- [10054]="on",
- [10055]="on",
- [10056]="on",
- [10057]="on",
- [10058]="on",
- [10059]="on",
- [10060]="on",
- [10061]="on",
- [10062]="on",
- [10063]="on",
- [10064]="on",
- [10065]="on",
- [10066]="on",
- [10067]="on",
- [10068]="on",
- [10069]="on",
- [10070]="on",
- [10071]="on",
- [10072]="on",
- [10073]="on",
- [10074]="on",
- [10075]="on",
- [10076]="on",
- [10077]="on",
- [10078]="on",
- [10079]="on",
- [10080]="on",
- [10081]="on",
- [10082]="on",
- [10083]="on",
- [10084]="on",
- [10085]="on",
- [10086]="on",
- [10087]="on",
- [10088]="on",
- [10089]="on",
- [10090]="on",
- [10091]="on",
- [10092]="on",
- [10093]="on",
- [10094]="on",
- [10095]="on",
- [10096]="on",
- [10097]="on",
- [10098]="on",
- [10099]="on",
- [10100]="on",
- [10101]="on",
- [10102]="on",
- [10103]="on",
- [10104]="on",
- [10105]="on",
- [10106]="on",
- [10107]="on",
- [10108]="on",
- [10109]="on",
- [10110]="on",
- [10111]="on",
- [10112]="on",
- [10113]="on",
- [10114]="on",
- [10115]="on",
- [10116]="on",
- [10117]="on",
- [10118]="on",
- [10119]="on",
- [10120]="on",
- [10121]="on",
- [10122]="on",
- [10123]="on",
- [10124]="on",
- [10125]="on",
- [10126]="on",
- [10127]="on",
- [10128]="on",
- [10129]="on",
- [10130]="on",
- [10131]="on",
- [10132]="on",
- [10133]="on",
- [10134]="on",
- [10135]="on",
- [10136]="on",
- [10137]="on",
- [10138]="on",
- [10139]="on",
- [10140]="on",
- [10141]="on",
- [10142]="on",
- [10143]="on",
- [10144]="on",
- [10145]="on",
- [10146]="on",
- [10147]="on",
- [10148]="on",
- [10149]="on",
- [10150]="on",
- [10151]="on",
- [10152]="on",
- [10153]="on",
- [10154]="on",
- [10155]="on",
- [10156]="on",
- [10157]="on",
- [10158]="on",
- [10159]="on",
- [10160]="on",
- [10161]="on",
- [10162]="on",
- [10163]="on",
- [10164]="on",
- [10165]="on",
- [10166]="on",
- [10167]="on",
- [10168]="on",
- [10169]="on",
- [10170]="on",
- [10171]="on",
- [10172]="on",
- [10173]="on",
- [10174]="on",
- [10175]="on",
- [10176]="on",
- [10177]="on",
- [10178]="on",
- [10179]="on",
- [10180]="on",
- [10181]="on",
- [10182]="on",
- [10183]="on",
- [10184]="on",
- [10185]="on",
- [10186]="on",
- [10187]="on",
- [10188]="on",
- [10189]="on",
- [10190]="on",
- [10191]="on",
- [10192]="on",
- [10193]="on",
- [10194]="on",
- [10195]="on",
- [10196]="on",
- [10197]="on",
- [10198]="on",
- [10199]="on",
- [10200]="on",
- [10201]="on",
- [10202]="on",
- [10203]="on",
- [10204]="on",
- [10205]="on",
- [10206]="on",
- [10207]="on",
- [10208]="on",
- [10209]="on",
- [10210]="on",
- [10211]="on",
- [10212]="on",
- [10213]="on",
- [10214]="on",
- [10215]="on",
- [10216]="on",
- [10217]="on",
- [10218]="on",
- [10219]="on",
- [10220]="on",
- [10221]="on",
- [10222]="on",
- [10223]="on",
- [10224]="on",
- [10225]="on",
- [10226]="on",
- [10227]="on",
- [10228]="on",
- [10229]="on",
- [10230]="on",
- [10231]="on",
- [10232]="on",
- [10233]="on",
- [10234]="on",
- [10235]="on",
- [10236]="on",
- [10237]="on",
- [10238]="on",
- [10239]="on",
- [10496]="on",
- [10497]="on",
- [10498]="on",
- [10499]="on",
- [10500]="on",
- [10501]="on",
- [10502]="on",
- [10503]="on",
- [10504]="on",
- [10505]="on",
- [10506]="on",
- [10507]="on",
- [10508]="on",
- [10509]="on",
- [10510]="on",
- [10511]="on",
- [10512]="on",
- [10513]="on",
- [10514]="on",
- [10515]="on",
- [10516]="on",
- [10517]="on",
- [10518]="on",
- [10519]="on",
- [10520]="on",
- [10521]="on",
- [10522]="on",
- [10523]="on",
- [10524]="on",
- [10525]="on",
- [10526]="on",
- [10527]="on",
- [10528]="on",
- [10529]="on",
- [10530]="on",
- [10531]="on",
- [10532]="on",
- [10533]="on",
- [10534]="on",
- [10535]="on",
- [10536]="on",
- [10537]="on",
- [10538]="on",
- [10539]="on",
- [10540]="on",
- [10541]="on",
- [10542]="on",
- [10543]="on",
- [10544]="on",
- [10545]="on",
- [10546]="on",
- [10547]="on",
- [10548]="on",
- [10549]="on",
- [10550]="on",
- [10551]="on",
- [10552]="on",
- [10553]="on",
- [10554]="on",
- [10555]="on",
- [10556]="on",
- [10557]="on",
- [10558]="on",
- [10559]="on",
- [10560]="on",
- [10561]="on",
- [10562]="on",
- [10563]="on",
- [10564]="on",
- [10565]="on",
- [10566]="on",
- [10567]="on",
- [10568]="on",
- [10569]="on",
- [10570]="on",
- [10571]="on",
- [10572]="on",
- [10573]="on",
- [10574]="on",
- [10575]="on",
- [10576]="on",
- [10577]="on",
- [10578]="on",
- [10579]="on",
- [10580]="on",
- [10581]="on",
- [10582]="on",
- [10583]="on",
- [10584]="on",
- [10585]="on",
- [10586]="on",
- [10587]="on",
- [10588]="on",
- [10589]="on",
- [10590]="on",
- [10591]="on",
- [10592]="on",
- [10593]="on",
- [10594]="on",
- [10595]="on",
- [10596]="on",
- [10597]="on",
- [10598]="on",
- [10599]="on",
- [10600]="on",
- [10601]="on",
- [10602]="on",
- [10603]="on",
- [10604]="on",
- [10605]="on",
- [10606]="on",
- [10607]="on",
- [10608]="on",
- [10609]="on",
- [10610]="on",
- [10611]="on",
- [10612]="on",
- [10613]="on",
- [10614]="on",
- [10615]="on",
- [10616]="on",
- [10617]="on",
- [10618]="on",
- [10619]="on",
- [10620]="on",
- [10621]="on",
- [10622]="on",
- [10623]="on",
- [10624]="on",
- [10625]="on",
- [10626]="on",
- [10627]="on",
- [10628]="on",
- [10629]="on",
- [10630]="on",
- [10631]="on",
- [10632]="on",
- [10633]="on",
- [10634]="on",
- [10635]="on",
- [10636]="on",
- [10637]="on",
- [10638]="on",
- [10639]="on",
- [10640]="on",
- [10641]="on",
- [10642]="on",
- [10643]="on",
- [10644]="on",
- [10645]="on",
- [10646]="on",
- [10647]="on",
- [10648]="on",
- [10649]="on",
- [10650]="on",
- [10651]="on",
- [10652]="on",
- [10653]="on",
- [10654]="on",
- [10655]="on",
- [10656]="on",
- [10657]="on",
- [10658]="on",
- [10659]="on",
- [10660]="on",
- [10661]="on",
- [10662]="on",
- [10663]="on",
- [10664]="on",
- [10665]="on",
- [10666]="on",
- [10667]="on",
- [10668]="on",
- [10669]="on",
- [10670]="on",
- [10671]="on",
- [10672]="on",
- [10673]="on",
- [10674]="on",
- [10675]="on",
- [10676]="on",
- [10677]="on",
- [10678]="on",
- [10679]="on",
- [10680]="on",
- [10681]="on",
- [10682]="on",
- [10683]="on",
- [10684]="on",
- [10685]="on",
- [10686]="on",
- [10687]="on",
- [10688]="on",
- [10689]="on",
- [10690]="on",
- [10691]="on",
- [10692]="on",
- [10693]="on",
- [10694]="on",
- [10695]="on",
- [10696]="on",
- [10697]="on",
- [10698]="on",
- [10699]="on",
- [10700]="on",
- [10701]="on",
- [10702]="on",
- [10703]="on",
- [10704]="on",
- [10705]="on",
- [10706]="on",
- [10707]="on",
- [10708]="on",
- [10709]="on",
- [10710]="on",
- [10711]="on",
- [10712]="on",
- [10713]="on",
- [10714]="on",
- [10715]="on",
- [10716]="on",
- [10717]="on",
- [10718]="on",
- [10719]="on",
- [10720]="on",
- [10721]="on",
- [10722]="on",
- [10723]="on",
- [10724]="on",
- [10725]="on",
- [10726]="on",
- [10727]="on",
- [10728]="on",
- [10729]="on",
- [10730]="on",
- [10731]="on",
- [10732]="on",
- [10733]="on",
- [10734]="on",
- [10735]="on",
- [10736]="on",
- [10737]="on",
- [10738]="on",
- [10739]="on",
- [10740]="on",
- [10741]="on",
- [10742]="on",
- [10743]="on",
- [10744]="on",
- [10745]="on",
- [10746]="on",
- [10747]="on",
- [10748]="on",
- [10749]="on",
- [10750]="on",
- [10751]="on",
- [10752]="on",
- [10753]="on",
- [10754]="on",
- [10755]="on",
- [10756]="on",
- [10757]="on",
- [10758]="on",
- [10759]="on",
- [10760]="on",
- [10761]="on",
- [10762]="on",
- [10763]="on",
- [10764]="on",
- [10765]="on",
- [10766]="on",
- [10767]="on",
- [10768]="on",
- [10769]="on",
- [10770]="on",
- [10771]="on",
- [10772]="on",
- [10773]="on",
- [10774]="on",
- [10775]="on",
- [10776]="on",
- [10777]="on",
- [10778]="on",
- [10779]="on",
- [10780]="on",
- [10781]="on",
- [10782]="on",
- [10783]="on",
- [10784]="on",
- [10785]="on",
- [10786]="on",
- [10787]="on",
- [10788]="on",
- [10789]="on",
- [10790]="on",
- [10791]="on",
- [10792]="on",
- [10793]="on",
- [10794]="on",
- [10795]="on",
- [10796]="on",
- [10797]="on",
- [10798]="on",
- [10799]="on",
- [10800]="on",
- [10801]="on",
- [10802]="on",
- [10803]="on",
- [10804]="on",
- [10805]="on",
- [10806]="on",
- [10807]="on",
- [10808]="on",
- [10809]="on",
- [10810]="on",
- [10811]="on",
- [10812]="on",
- [10813]="on",
- [10814]="on",
- [10815]="on",
- [10816]="on",
- [10817]="on",
- [10818]="on",
- [10819]="on",
- [10820]="on",
- [10821]="on",
- [10822]="on",
- [10823]="on",
- [10824]="on",
- [10825]="on",
- [10826]="on",
- [10827]="on",
- [10828]="on",
- [10829]="on",
- [10830]="on",
- [10831]="on",
- [10832]="on",
- [10833]="on",
- [10834]="on",
- [10835]="on",
- [10836]="on",
- [10837]="on",
- [10838]="on",
- [10839]="on",
- [10840]="on",
- [10841]="on",
- [10842]="on",
- [10843]="on",
- [10844]="on",
- [10845]="on",
- [10846]="on",
- [10847]="on",
- [10848]="on",
- [10849]="on",
- [10850]="on",
- [10851]="on",
- [10852]="on",
- [10853]="on",
- [10854]="on",
- [10855]="on",
- [10856]="on",
- [10857]="on",
- [10858]="on",
- [10859]="on",
- [10860]="on",
- [10861]="on",
- [10862]="on",
- [10863]="on",
- [10864]="on",
- [10865]="on",
- [10866]="on",
- [10867]="on",
- [10868]="on",
- [10869]="on",
- [10870]="on",
- [10871]="on",
- [10872]="on",
- [10873]="on",
- [10874]="on",
- [10875]="on",
- [10876]="on",
- [10877]="on",
- [10878]="on",
- [10879]="on",
- [10880]="on",
- [10881]="on",
- [10882]="on",
- [10883]="on",
- [10884]="on",
- [10885]="on",
- [10886]="on",
- [10887]="on",
- [10888]="on",
- [10889]="on",
- [10890]="on",
- [10891]="on",
- [10892]="on",
- [10893]="on",
- [10894]="on",
- [10895]="on",
- [10896]="on",
- [10897]="on",
- [10898]="on",
- [10899]="on",
- [10900]="on",
- [10901]="on",
- [10902]="on",
- [10903]="on",
- [10904]="on",
- [10905]="on",
- [10906]="on",
- [10907]="on",
- [10908]="on",
- [10909]="on",
- [10910]="on",
- [10911]="on",
- [10912]="on",
- [10913]="on",
- [10914]="on",
- [10915]="on",
- [10916]="on",
- [10917]="on",
- [10918]="on",
- [10919]="on",
- [10920]="on",
- [10921]="on",
- [10922]="on",
- [10923]="on",
- [10924]="on",
- [10925]="on",
- [10926]="on",
- [10927]="on",
- [10928]="on",
- [10929]="on",
- [10930]="on",
- [10931]="on",
- [10932]="on",
- [10933]="on",
- [10934]="on",
- [10935]="on",
- [10936]="on",
- [10937]="on",
- [10938]="on",
- [10939]="on",
- [10940]="on",
- [10941]="on",
- [10942]="on",
- [10943]="on",
- [10944]="on",
- [10945]="on",
- [10946]="on",
- [10947]="on",
- [10948]="on",
- [10949]="on",
- [10950]="on",
- [10951]="on",
- [10952]="on",
- [10953]="on",
- [10954]="on",
- [10955]="on",
- [10956]="on",
- [10957]="on",
- [10958]="on",
- [10959]="on",
- [10960]="on",
- [10961]="on",
- [10962]="on",
- [10963]="on",
- [10964]="on",
- [10965]="on",
- [10966]="on",
- [10967]="on",
- [10968]="on",
- [10969]="on",
- [10970]="on",
- [10971]="on",
- [10972]="on",
- [10973]="on",
- [10974]="on",
- [10975]="on",
- [10976]="on",
- [10977]="on",
- [10978]="on",
- [10979]="on",
- [10980]="on",
- [10981]="on",
- [10982]="on",
- [10983]="on",
- [10984]="on",
- [10985]="on",
- [10986]="on",
- [10987]="on",
- [10988]="on",
- [10989]="on",
- [10990]="on",
- [10991]="on",
- [10992]="on",
- [10993]="on",
- [10994]="on",
- [10995]="on",
- [10996]="on",
- [10997]="on",
- [10998]="on",
- [10999]="on",
- [11000]="on",
- [11001]="on",
- [11002]="on",
- [11003]="on",
- [11004]="on",
- [11005]="on",
- [11006]="on",
- [11007]="on",
- [11008]="on",
- [11009]="on",
- [11010]="on",
- [11011]="on",
- [11012]="on",
- [11013]="on",
- [11014]="on",
- [11015]="on",
- [11016]="on",
- [11017]="on",
- [11018]="on",
- [11019]="on",
- [11020]="on",
- [11021]="on",
- [11022]="on",
- [11023]="on",
- [11024]="on",
- [11025]="on",
- [11026]="on",
- [11027]="on",
- [11028]="on",
- [11029]="on",
- [11030]="on",
- [11031]="on",
- [11032]="on",
- [11033]="on",
- [11034]="on",
- [11035]="on",
- [11036]="on",
- [11037]="on",
- [11038]="on",
- [11039]="on",
- [11040]="on",
- [11041]="on",
- [11042]="on",
- [11043]="on",
- [11044]="on",
- [11045]="on",
- [11046]="on",
- [11047]="on",
- [11048]="on",
- [11049]="on",
- [11050]="on",
- [11051]="on",
- [11052]="on",
- [11053]="on",
- [11054]="on",
- [11055]="on",
- [11056]="on",
- [11057]="on",
- [11058]="on",
- [11059]="on",
- [11060]="on",
- [11061]="on",
- [11062]="on",
- [11063]="on",
- [11064]="on",
- [11065]="on",
- [11066]="on",
- [11067]="on",
- [11068]="on",
- [11069]="on",
- [11070]="on",
- [11071]="on",
- [11072]="on",
- [11073]="on",
- [11074]="on",
- [11075]="on",
- [11076]="on",
- [11077]="on",
- [11078]="on",
- [11079]="on",
- [11080]="on",
- [11081]="on",
- [11082]="on",
- [11083]="on",
- [11084]="on",
- [11085]="on",
- [11086]="on",
- [11087]="on",
- [11088]="on",
- [11089]="on",
- [11090]="on",
- [11091]="on",
- [11092]="on",
- [11093]="on",
- [11094]="on",
- [11095]="on",
- [11096]="on",
- [11097]="on",
- [11098]="on",
- [11099]="on",
- [11100]="on",
- [11101]="on",
- [11102]="on",
- [11103]="on",
- [11104]="on",
- [11105]="on",
- [11106]="on",
- [11107]="on",
- [11108]="on",
- [11109]="on",
- [11110]="on",
- [11111]="on",
- [11112]="on",
- [11113]="on",
- [11114]="on",
- [11115]="on",
- [11116]="on",
- [11117]="on",
- [11118]="on",
- [11119]="on",
- [11120]="on",
- [11121]="on",
- [11122]="on",
- [11123]="on",
- [11126]="on",
- [11127]="on",
- [11128]="on",
- [11129]="on",
- [11130]="on",
- [11131]="on",
- [11132]="on",
- [11133]="on",
- [11134]="on",
- [11135]="on",
- [11136]="on",
- [11137]="on",
- [11138]="on",
- [11139]="on",
- [11140]="on",
- [11141]="on",
- [11142]="on",
- [11143]="on",
- [11144]="on",
- [11145]="on",
- [11146]="on",
- [11147]="on",
- [11148]="on",
- [11149]="on",
- [11150]="on",
- [11151]="on",
- [11152]="on",
- [11153]="on",
- [11154]="on",
- [11155]="on",
- [11156]="on",
- [11157]="on",
- [11160]="on",
- [11161]="on",
- [11162]="on",
- [11163]="on",
- [11164]="on",
- [11165]="on",
- [11166]="on",
- [11167]="on",
- [11168]="on",
- [11169]="on",
- [11170]="on",
- [11171]="on",
- [11172]="on",
- [11173]="on",
- [11174]="on",
- [11175]="on",
- [11176]="on",
- [11177]="on",
- [11178]="on",
- [11179]="on",
- [11180]="on",
- [11181]="on",
- [11182]="on",
- [11183]="on",
- [11184]="on",
- [11185]="on",
- [11186]="on",
- [11187]="on",
- [11188]="on",
- [11189]="on",
- [11190]="on",
- [11191]="on",
- [11192]="on",
- [11193]="on",
- [11197]="on",
- [11198]="on",
- [11199]="on",
- [11200]="on",
- [11201]="on",
- [11202]="on",
- [11203]="on",
- [11204]="on",
- [11205]="on",
- [11206]="on",
- [11207]="on",
- [11208]="on",
- [11210]="on",
- [11211]="on",
- [11212]="on",
- [11213]="on",
- [11214]="on",
- [11215]="on",
- [11216]="on",
- [11217]="on",
- [11218]="on",
- [11244]="on",
- [11245]="on",
- [11246]="on",
- [11247]="on",
- [11493]="on",
- [11494]="on",
- [11495]="on",
- [11496]="on",
- [11497]="on",
- [11498]="on",
- [11503]="nsm",
- [11504]="nsm",
- [11505]="nsm",
- [11513]="on",
- [11514]="on",
- [11515]="on",
- [11516]="on",
- [11517]="on",
- [11518]="on",
- [11519]="on",
- [11647]="nsm",
- [11744]="nsm",
- [11745]="nsm",
- [11746]="nsm",
- [11747]="nsm",
- [11748]="nsm",
- [11749]="nsm",
- [11750]="nsm",
- [11751]="nsm",
- [11752]="nsm",
- [11753]="nsm",
- [11754]="nsm",
- [11755]="nsm",
- [11756]="nsm",
- [11757]="nsm",
- [11758]="nsm",
- [11759]="nsm",
- [11760]="nsm",
- [11761]="nsm",
- [11762]="nsm",
- [11763]="nsm",
- [11764]="nsm",
- [11765]="nsm",
- [11766]="nsm",
- [11767]="nsm",
- [11768]="nsm",
- [11769]="nsm",
- [11770]="nsm",
- [11771]="nsm",
- [11772]="nsm",
- [11773]="nsm",
- [11774]="nsm",
- [11775]="nsm",
- [11776]="on",
- [11777]="on",
- [11778]="on",
- [11779]="on",
- [11780]="on",
- [11781]="on",
- [11782]="on",
- [11783]="on",
- [11784]="on",
- [11785]="on",
- [11786]="on",
- [11787]="on",
- [11788]="on",
- [11789]="on",
- [11790]="on",
- [11791]="on",
- [11792]="on",
- [11793]="on",
- [11794]="on",
- [11795]="on",
- [11796]="on",
- [11797]="on",
- [11798]="on",
- [11799]="on",
- [11800]="on",
- [11801]="on",
- [11802]="on",
- [11803]="on",
- [11804]="on",
- [11805]="on",
- [11806]="on",
- [11807]="on",
- [11808]="on",
- [11809]="on",
- [11810]="on",
- [11811]="on",
- [11812]="on",
- [11813]="on",
- [11814]="on",
- [11815]="on",
- [11816]="on",
- [11817]="on",
- [11818]="on",
- [11819]="on",
- [11820]="on",
- [11821]="on",
- [11822]="on",
- [11823]="on",
- [11824]="on",
- [11825]="on",
- [11826]="on",
- [11827]="on",
- [11828]="on",
- [11829]="on",
- [11830]="on",
- [11831]="on",
- [11832]="on",
- [11833]="on",
- [11834]="on",
- [11835]="on",
- [11836]="on",
- [11837]="on",
- [11838]="on",
- [11839]="on",
- [11840]="on",
- [11841]="on",
- [11842]="on",
- [11843]="on",
- [11844]="on",
- [11845]="on",
- [11846]="on",
- [11847]="on",
- [11848]="on",
- [11849]="on",
- [11904]="on",
- [11905]="on",
- [11906]="on",
- [11907]="on",
- [11908]="on",
- [11909]="on",
- [11910]="on",
- [11911]="on",
- [11912]="on",
- [11913]="on",
- [11914]="on",
- [11915]="on",
- [11916]="on",
- [11917]="on",
- [11918]="on",
- [11919]="on",
- [11920]="on",
- [11921]="on",
- [11922]="on",
- [11923]="on",
- [11924]="on",
- [11925]="on",
- [11926]="on",
- [11927]="on",
- [11928]="on",
- [11929]="on",
- [11931]="on",
- [11932]="on",
- [11933]="on",
- [11934]="on",
- [11935]="on",
- [11936]="on",
- [11937]="on",
- [11938]="on",
- [11939]="on",
- [11940]="on",
- [11941]="on",
- [11942]="on",
- [11943]="on",
- [11944]="on",
- [11945]="on",
- [11946]="on",
- [11947]="on",
- [11948]="on",
- [11949]="on",
- [11950]="on",
- [11951]="on",
- [11952]="on",
- [11953]="on",
- [11954]="on",
- [11955]="on",
- [11956]="on",
- [11957]="on",
- [11958]="on",
- [11959]="on",
- [11960]="on",
- [11961]="on",
- [11962]="on",
- [11963]="on",
- [11964]="on",
- [11965]="on",
- [11966]="on",
- [11967]="on",
- [11968]="on",
- [11969]="on",
- [11970]="on",
- [11971]="on",
- [11972]="on",
- [11973]="on",
- [11974]="on",
- [11975]="on",
- [11976]="on",
- [11977]="on",
- [11978]="on",
- [11979]="on",
- [11980]="on",
- [11981]="on",
- [11982]="on",
- [11983]="on",
- [11984]="on",
- [11985]="on",
- [11986]="on",
- [11987]="on",
- [11988]="on",
- [11989]="on",
- [11990]="on",
- [11991]="on",
- [11992]="on",
- [11993]="on",
- [11994]="on",
- [11995]="on",
- [11996]="on",
- [11997]="on",
- [11998]="on",
- [11999]="on",
- [12000]="on",
- [12001]="on",
- [12002]="on",
- [12003]="on",
- [12004]="on",
- [12005]="on",
- [12006]="on",
- [12007]="on",
- [12008]="on",
- [12009]="on",
- [12010]="on",
- [12011]="on",
- [12012]="on",
- [12013]="on",
- [12014]="on",
- [12015]="on",
- [12016]="on",
- [12017]="on",
- [12018]="on",
- [12019]="on",
- [12032]="on",
- [12033]="on",
- [12034]="on",
- [12035]="on",
- [12036]="on",
- [12037]="on",
- [12038]="on",
- [12039]="on",
- [12040]="on",
- [12041]="on",
- [12042]="on",
- [12043]="on",
- [12044]="on",
- [12045]="on",
- [12046]="on",
- [12047]="on",
- [12048]="on",
- [12049]="on",
- [12050]="on",
- [12051]="on",
- [12052]="on",
- [12053]="on",
- [12054]="on",
- [12055]="on",
- [12056]="on",
- [12057]="on",
- [12058]="on",
- [12059]="on",
- [12060]="on",
- [12061]="on",
- [12062]="on",
- [12063]="on",
- [12064]="on",
- [12065]="on",
- [12066]="on",
- [12067]="on",
- [12068]="on",
- [12069]="on",
- [12070]="on",
- [12071]="on",
- [12072]="on",
- [12073]="on",
- [12074]="on",
- [12075]="on",
- [12076]="on",
- [12077]="on",
- [12078]="on",
- [12079]="on",
- [12080]="on",
- [12081]="on",
- [12082]="on",
- [12083]="on",
- [12084]="on",
- [12085]="on",
- [12086]="on",
- [12087]="on",
- [12088]="on",
- [12089]="on",
- [12090]="on",
- [12091]="on",
- [12092]="on",
- [12093]="on",
- [12094]="on",
- [12095]="on",
- [12096]="on",
- [12097]="on",
- [12098]="on",
- [12099]="on",
- [12100]="on",
- [12101]="on",
- [12102]="on",
- [12103]="on",
- [12104]="on",
- [12105]="on",
- [12106]="on",
- [12107]="on",
- [12108]="on",
- [12109]="on",
- [12110]="on",
- [12111]="on",
- [12112]="on",
- [12113]="on",
- [12114]="on",
- [12115]="on",
- [12116]="on",
- [12117]="on",
- [12118]="on",
- [12119]="on",
- [12120]="on",
- [12121]="on",
- [12122]="on",
- [12123]="on",
- [12124]="on",
- [12125]="on",
- [12126]="on",
- [12127]="on",
- [12128]="on",
- [12129]="on",
- [12130]="on",
- [12131]="on",
- [12132]="on",
- [12133]="on",
- [12134]="on",
- [12135]="on",
- [12136]="on",
- [12137]="on",
- [12138]="on",
- [12139]="on",
- [12140]="on",
- [12141]="on",
- [12142]="on",
- [12143]="on",
- [12144]="on",
- [12145]="on",
- [12146]="on",
- [12147]="on",
- [12148]="on",
- [12149]="on",
- [12150]="on",
- [12151]="on",
- [12152]="on",
- [12153]="on",
- [12154]="on",
- [12155]="on",
- [12156]="on",
- [12157]="on",
- [12158]="on",
- [12159]="on",
- [12160]="on",
- [12161]="on",
- [12162]="on",
- [12163]="on",
- [12164]="on",
- [12165]="on",
- [12166]="on",
- [12167]="on",
- [12168]="on",
- [12169]="on",
- [12170]="on",
- [12171]="on",
- [12172]="on",
- [12173]="on",
- [12174]="on",
- [12175]="on",
- [12176]="on",
- [12177]="on",
- [12178]="on",
- [12179]="on",
- [12180]="on",
- [12181]="on",
- [12182]="on",
- [12183]="on",
- [12184]="on",
- [12185]="on",
- [12186]="on",
- [12187]="on",
- [12188]="on",
- [12189]="on",
- [12190]="on",
- [12191]="on",
- [12192]="on",
- [12193]="on",
- [12194]="on",
- [12195]="on",
- [12196]="on",
- [12197]="on",
- [12198]="on",
- [12199]="on",
- [12200]="on",
- [12201]="on",
- [12202]="on",
- [12203]="on",
- [12204]="on",
- [12205]="on",
- [12206]="on",
- [12207]="on",
- [12208]="on",
- [12209]="on",
- [12210]="on",
- [12211]="on",
- [12212]="on",
- [12213]="on",
- [12214]="on",
- [12215]="on",
- [12216]="on",
- [12217]="on",
- [12218]="on",
- [12219]="on",
- [12220]="on",
- [12221]="on",
- [12222]="on",
- [12223]="on",
- [12224]="on",
- [12225]="on",
- [12226]="on",
- [12227]="on",
- [12228]="on",
- [12229]="on",
- [12230]="on",
- [12231]="on",
- [12232]="on",
- [12233]="on",
- [12234]="on",
- [12235]="on",
- [12236]="on",
- [12237]="on",
- [12238]="on",
- [12239]="on",
- [12240]="on",
- [12241]="on",
- [12242]="on",
- [12243]="on",
- [12244]="on",
- [12245]="on",
- [12272]="on",
- [12273]="on",
- [12274]="on",
- [12275]="on",
- [12276]="on",
- [12277]="on",
- [12278]="on",
- [12279]="on",
- [12280]="on",
- [12281]="on",
- [12282]="on",
- [12283]="on",
- [12288]="ws",
- [12289]="on",
- [12290]="on",
- [12291]="on",
- [12292]="on",
- [12296]="on",
- [12297]="on",
- [12298]="on",
- [12299]="on",
- [12300]="on",
- [12301]="on",
- [12302]="on",
- [12303]="on",
- [12304]="on",
- [12305]="on",
- [12306]="on",
- [12307]="on",
- [12308]="on",
- [12309]="on",
- [12310]="on",
- [12311]="on",
- [12312]="on",
- [12313]="on",
- [12314]="on",
- [12315]="on",
- [12316]="on",
- [12317]="on",
- [12318]="on",
- [12319]="on",
- [12320]="on",
- [12330]="nsm",
- [12331]="nsm",
- [12332]="nsm",
- [12333]="nsm",
- [12336]="on",
- [12342]="on",
- [12343]="on",
- [12349]="on",
- [12350]="on",
- [12351]="on",
- [12441]="nsm",
- [12442]="nsm",
- [12443]="on",
- [12444]="on",
- [12448]="on",
- [12539]="on",
- [12736]="on",
- [12737]="on",
- [12738]="on",
- [12739]="on",
- [12740]="on",
- [12741]="on",
- [12742]="on",
- [12743]="on",
- [12744]="on",
- [12745]="on",
- [12746]="on",
- [12747]="on",
- [12748]="on",
- [12749]="on",
- [12750]="on",
- [12751]="on",
- [12752]="on",
- [12753]="on",
- [12754]="on",
- [12755]="on",
- [12756]="on",
- [12757]="on",
- [12758]="on",
- [12759]="on",
- [12760]="on",
- [12761]="on",
- [12762]="on",
- [12763]="on",
- [12764]="on",
- [12765]="on",
- [12766]="on",
- [12767]="on",
- [12768]="on",
- [12769]="on",
- [12770]="on",
- [12771]="on",
- [12829]="on",
- [12830]="on",
- [12880]="on",
- [12881]="on",
- [12882]="on",
- [12883]="on",
- [12884]="on",
- [12885]="on",
- [12886]="on",
- [12887]="on",
- [12888]="on",
- [12889]="on",
- [12890]="on",
- [12891]="on",
- [12892]="on",
- [12893]="on",
- [12894]="on",
- [12895]="on",
- [12924]="on",
- [12925]="on",
- [12926]="on",
- [12977]="on",
- [12978]="on",
- [12979]="on",
- [12980]="on",
- [12981]="on",
- [12982]="on",
- [12983]="on",
- [12984]="on",
- [12985]="on",
- [12986]="on",
- [12987]="on",
- [12988]="on",
- [12989]="on",
- [12990]="on",
- [12991]="on",
- [13004]="on",
- [13005]="on",
- [13006]="on",
- [13007]="on",
- [13175]="on",
- [13176]="on",
- [13177]="on",
- [13178]="on",
- [13278]="on",
- [13279]="on",
- [13311]="on",
- [19904]="on",
- [19905]="on",
- [19906]="on",
- [19907]="on",
- [19908]="on",
- [19909]="on",
- [19910]="on",
- [19911]="on",
- [19912]="on",
- [19913]="on",
- [19914]="on",
- [19915]="on",
- [19916]="on",
- [19917]="on",
- [19918]="on",
- [19919]="on",
- [19920]="on",
- [19921]="on",
- [19922]="on",
- [19923]="on",
- [19924]="on",
- [19925]="on",
- [19926]="on",
- [19927]="on",
- [19928]="on",
- [19929]="on",
- [19930]="on",
- [19931]="on",
- [19932]="on",
- [19933]="on",
- [19934]="on",
- [19935]="on",
- [19936]="on",
- [19937]="on",
- [19938]="on",
- [19939]="on",
- [19940]="on",
- [19941]="on",
- [19942]="on",
- [19943]="on",
- [19944]="on",
- [19945]="on",
- [19946]="on",
- [19947]="on",
- [19948]="on",
- [19949]="on",
- [19950]="on",
- [19951]="on",
- [19952]="on",
- [19953]="on",
- [19954]="on",
- [19955]="on",
- [19956]="on",
- [19957]="on",
- [19958]="on",
- [19959]="on",
- [19960]="on",
- [19961]="on",
- [19962]="on",
- [19963]="on",
- [19964]="on",
- [19965]="on",
- [19966]="on",
- [19967]="on",
- [42128]="on",
- [42129]="on",
- [42130]="on",
- [42131]="on",
- [42132]="on",
- [42133]="on",
- [42134]="on",
- [42135]="on",
- [42136]="on",
- [42137]="on",
- [42138]="on",
- [42139]="on",
- [42140]="on",
- [42141]="on",
- [42142]="on",
- [42143]="on",
- [42144]="on",
- [42145]="on",
- [42146]="on",
- [42147]="on",
- [42148]="on",
- [42149]="on",
- [42150]="on",
- [42151]="on",
- [42152]="on",
- [42153]="on",
- [42154]="on",
- [42155]="on",
- [42156]="on",
- [42157]="on",
- [42158]="on",
- [42159]="on",
- [42160]="on",
- [42161]="on",
- [42162]="on",
- [42163]="on",
- [42164]="on",
- [42165]="on",
- [42166]="on",
- [42167]="on",
- [42168]="on",
- [42169]="on",
- [42170]="on",
- [42171]="on",
- [42172]="on",
- [42173]="on",
- [42174]="on",
- [42175]="on",
- [42176]="on",
- [42177]="on",
- [42178]="on",
- [42179]="on",
- [42180]="on",
- [42181]="on",
- [42182]="on",
- [42509]="on",
- [42510]="on",
- [42511]="on",
- [42607]="nsm",
- [42608]="nsm",
- [42609]="nsm",
- [42610]="nsm",
- [42611]="on",
- [42612]="nsm",
- [42613]="nsm",
- [42614]="nsm",
- [42615]="nsm",
- [42616]="nsm",
- [42617]="nsm",
- [42618]="nsm",
- [42619]="nsm",
- [42620]="nsm",
- [42621]="nsm",
- [42622]="on",
- [42623]="on",
- [42654]="nsm",
- [42655]="nsm",
- [42736]="nsm",
- [42737]="nsm",
- [42752]="on",
- [42753]="on",
- [42754]="on",
- [42755]="on",
- [42756]="on",
- [42757]="on",
- [42758]="on",
- [42759]="on",
- [42760]="on",
- [42761]="on",
- [42762]="on",
- [42763]="on",
- [42764]="on",
- [42765]="on",
- [42766]="on",
- [42767]="on",
- [42768]="on",
- [42769]="on",
- [42770]="on",
- [42771]="on",
- [42772]="on",
- [42773]="on",
- [42774]="on",
- [42775]="on",
- [42776]="on",
- [42777]="on",
- [42778]="on",
- [42779]="on",
- [42780]="on",
- [42781]="on",
- [42782]="on",
- [42783]="on",
- [42784]="on",
- [42785]="on",
- [42888]="on",
- [43010]="nsm",
- [43014]="nsm",
- [43019]="nsm",
- [43045]="nsm",
- [43046]="nsm",
- [43048]="on",
- [43049]="on",
- [43050]="on",
- [43051]="on",
- [43064]="et",
- [43065]="et",
- [43124]="on",
- [43125]="on",
- [43126]="on",
- [43127]="on",
- [43204]="nsm",
- [43205]="nsm",
- [43232]="nsm",
- [43233]="nsm",
- [43234]="nsm",
- [43235]="nsm",
- [43236]="nsm",
- [43237]="nsm",
- [43238]="nsm",
- [43239]="nsm",
- [43240]="nsm",
- [43241]="nsm",
- [43242]="nsm",
- [43243]="nsm",
- [43244]="nsm",
- [43245]="nsm",
- [43246]="nsm",
- [43247]="nsm",
- [43248]="nsm",
- [43249]="nsm",
- [43302]="nsm",
- [43303]="nsm",
- [43304]="nsm",
- [43305]="nsm",
- [43306]="nsm",
- [43307]="nsm",
- [43308]="nsm",
- [43309]="nsm",
- [43335]="nsm",
- [43336]="nsm",
- [43337]="nsm",
- [43338]="nsm",
- [43339]="nsm",
- [43340]="nsm",
- [43341]="nsm",
- [43342]="nsm",
- [43343]="nsm",
- [43344]="nsm",
- [43345]="nsm",
- [43392]="nsm",
- [43393]="nsm",
- [43394]="nsm",
- [43443]="nsm",
- [43446]="nsm",
- [43447]="nsm",
- [43448]="nsm",
- [43449]="nsm",
- [43452]="nsm",
- [43493]="nsm",
- [43561]="nsm",
- [43562]="nsm",
- [43563]="nsm",
- [43564]="nsm",
- [43565]="nsm",
- [43566]="nsm",
- [43569]="nsm",
- [43570]="nsm",
- [43573]="nsm",
- [43574]="nsm",
- [43587]="nsm",
- [43596]="nsm",
- [43644]="nsm",
- [43696]="nsm",
- [43698]="nsm",
- [43699]="nsm",
- [43700]="nsm",
- [43703]="nsm",
- [43704]="nsm",
- [43710]="nsm",
- [43711]="nsm",
- [43713]="nsm",
- [43756]="nsm",
- [43757]="nsm",
- [43766]="nsm",
- [44005]="nsm",
- [44008]="nsm",
- [44013]="nsm",
- [64285]="r",
- [64286]="nsm",
- [64287]="r",
- [64288]="r",
- [64289]="r",
- [64290]="r",
- [64291]="r",
- [64292]="r",
- [64293]="r",
- [64294]="r",
- [64295]="r",
- [64296]="r",
- [64297]="es",
- [64298]="r",
- [64299]="r",
- [64300]="r",
- [64301]="r",
- [64302]="r",
- [64303]="r",
- [64304]="r",
- [64305]="r",
- [64306]="r",
- [64307]="r",
- [64308]="r",
- [64309]="r",
- [64310]="r",
- [64312]="r",
- [64313]="r",
- [64314]="r",
- [64315]="r",
- [64316]="r",
- [64318]="r",
- [64320]="r",
- [64321]="r",
- [64323]="r",
- [64324]="r",
- [64326]="r",
- [64327]="r",
- [64328]="r",
- [64329]="r",
- [64330]="r",
- [64331]="r",
- [64332]="r",
- [64333]="r",
- [64334]="r",
- [64335]="r",
- [64336]="al",
- [64337]="al",
- [64338]="al",
- [64339]="al",
- [64340]="al",
- [64341]="al",
- [64342]="al",
- [64343]="al",
- [64344]="al",
- [64345]="al",
- [64346]="al",
- [64347]="al",
- [64348]="al",
- [64349]="al",
- [64350]="al",
- [64351]="al",
- [64352]="al",
- [64353]="al",
- [64354]="al",
- [64355]="al",
- [64356]="al",
- [64357]="al",
- [64358]="al",
- [64359]="al",
- [64360]="al",
- [64361]="al",
- [64362]="al",
- [64363]="al",
- [64364]="al",
- [64365]="al",
- [64366]="al",
- [64367]="al",
- [64368]="al",
- [64369]="al",
- [64370]="al",
- [64371]="al",
- [64372]="al",
- [64373]="al",
- [64374]="al",
- [64375]="al",
- [64376]="al",
- [64377]="al",
- [64378]="al",
- [64379]="al",
- [64380]="al",
- [64381]="al",
- [64382]="al",
- [64383]="al",
- [64384]="al",
- [64385]="al",
- [64386]="al",
- [64387]="al",
- [64388]="al",
- [64389]="al",
- [64390]="al",
- [64391]="al",
- [64392]="al",
- [64393]="al",
- [64394]="al",
- [64395]="al",
- [64396]="al",
- [64397]="al",
- [64398]="al",
- [64399]="al",
- [64400]="al",
- [64401]="al",
- [64402]="al",
- [64403]="al",
- [64404]="al",
- [64405]="al",
- [64406]="al",
- [64407]="al",
- [64408]="al",
- [64409]="al",
- [64410]="al",
- [64411]="al",
- [64412]="al",
- [64413]="al",
- [64414]="al",
- [64415]="al",
- [64416]="al",
- [64417]="al",
- [64418]="al",
- [64419]="al",
- [64420]="al",
- [64421]="al",
- [64422]="al",
- [64423]="al",
- [64424]="al",
- [64425]="al",
- [64426]="al",
- [64427]="al",
- [64428]="al",
- [64429]="al",
- [64430]="al",
- [64431]="al",
- [64432]="al",
- [64433]="al",
- [64434]="al",
- [64435]="al",
- [64436]="al",
- [64437]="al",
- [64438]="al",
- [64439]="al",
- [64440]="al",
- [64441]="al",
- [64442]="al",
- [64443]="al",
- [64444]="al",
- [64445]="al",
- [64446]="al",
- [64447]="al",
- [64448]="al",
- [64449]="al",
- [64467]="al",
- [64468]="al",
- [64469]="al",
- [64470]="al",
- [64471]="al",
- [64472]="al",
- [64473]="al",
- [64474]="al",
- [64475]="al",
- [64476]="al",
- [64477]="al",
- [64478]="al",
- [64479]="al",
- [64480]="al",
- [64481]="al",
- [64482]="al",
- [64483]="al",
- [64484]="al",
- [64485]="al",
- [64486]="al",
- [64487]="al",
- [64488]="al",
- [64489]="al",
- [64490]="al",
- [64491]="al",
- [64492]="al",
- [64493]="al",
- [64494]="al",
- [64495]="al",
- [64496]="al",
- [64497]="al",
- [64498]="al",
- [64499]="al",
- [64500]="al",
- [64501]="al",
- [64502]="al",
- [64503]="al",
- [64504]="al",
- [64505]="al",
- [64506]="al",
- [64507]="al",
- [64508]="al",
- [64509]="al",
- [64510]="al",
- [64511]="al",
- [64512]="al",
- [64513]="al",
- [64514]="al",
- [64515]="al",
- [64516]="al",
- [64517]="al",
- [64518]="al",
- [64519]="al",
- [64520]="al",
- [64521]="al",
- [64522]="al",
- [64523]="al",
- [64524]="al",
- [64525]="al",
- [64526]="al",
- [64527]="al",
- [64528]="al",
- [64529]="al",
- [64530]="al",
- [64531]="al",
- [64532]="al",
- [64533]="al",
- [64534]="al",
- [64535]="al",
- [64536]="al",
- [64537]="al",
- [64538]="al",
- [64539]="al",
- [64540]="al",
- [64541]="al",
- [64542]="al",
- [64543]="al",
- [64544]="al",
- [64545]="al",
- [64546]="al",
- [64547]="al",
- [64548]="al",
- [64549]="al",
- [64550]="al",
- [64551]="al",
- [64552]="al",
- [64553]="al",
- [64554]="al",
- [64555]="al",
- [64556]="al",
- [64557]="al",
- [64558]="al",
- [64559]="al",
- [64560]="al",
- [64561]="al",
- [64562]="al",
- [64563]="al",
- [64564]="al",
- [64565]="al",
- [64566]="al",
- [64567]="al",
- [64568]="al",
- [64569]="al",
- [64570]="al",
- [64571]="al",
- [64572]="al",
- [64573]="al",
- [64574]="al",
- [64575]="al",
- [64576]="al",
- [64577]="al",
- [64578]="al",
- [64579]="al",
- [64580]="al",
- [64581]="al",
- [64582]="al",
- [64583]="al",
- [64584]="al",
- [64585]="al",
- [64586]="al",
- [64587]="al",
- [64588]="al",
- [64589]="al",
- [64590]="al",
- [64591]="al",
- [64592]="al",
- [64593]="al",
- [64594]="al",
- [64595]="al",
- [64596]="al",
- [64597]="al",
- [64598]="al",
- [64599]="al",
- [64600]="al",
- [64601]="al",
- [64602]="al",
- [64603]="al",
- [64604]="al",
- [64605]="al",
- [64606]="al",
- [64607]="al",
- [64608]="al",
- [64609]="al",
- [64610]="al",
- [64611]="al",
- [64612]="al",
- [64613]="al",
- [64614]="al",
- [64615]="al",
- [64616]="al",
- [64617]="al",
- [64618]="al",
- [64619]="al",
- [64620]="al",
- [64621]="al",
- [64622]="al",
- [64623]="al",
- [64624]="al",
- [64625]="al",
- [64626]="al",
- [64627]="al",
- [64628]="al",
- [64629]="al",
- [64630]="al",
- [64631]="al",
- [64632]="al",
- [64633]="al",
- [64634]="al",
- [64635]="al",
- [64636]="al",
- [64637]="al",
- [64638]="al",
- [64639]="al",
- [64640]="al",
- [64641]="al",
- [64642]="al",
- [64643]="al",
- [64644]="al",
- [64645]="al",
- [64646]="al",
- [64647]="al",
- [64648]="al",
- [64649]="al",
- [64650]="al",
- [64651]="al",
- [64652]="al",
- [64653]="al",
- [64654]="al",
- [64655]="al",
- [64656]="al",
- [64657]="al",
- [64658]="al",
- [64659]="al",
- [64660]="al",
- [64661]="al",
- [64662]="al",
- [64663]="al",
- [64664]="al",
- [64665]="al",
- [64666]="al",
- [64667]="al",
- [64668]="al",
- [64669]="al",
- [64670]="al",
- [64671]="al",
- [64672]="al",
- [64673]="al",
- [64674]="al",
- [64675]="al",
- [64676]="al",
- [64677]="al",
- [64678]="al",
- [64679]="al",
- [64680]="al",
- [64681]="al",
- [64682]="al",
- [64683]="al",
- [64684]="al",
- [64685]="al",
- [64686]="al",
- [64687]="al",
- [64688]="al",
- [64689]="al",
- [64690]="al",
- [64691]="al",
- [64692]="al",
- [64693]="al",
- [64694]="al",
- [64695]="al",
- [64696]="al",
- [64697]="al",
- [64698]="al",
- [64699]="al",
- [64700]="al",
- [64701]="al",
- [64702]="al",
- [64703]="al",
- [64704]="al",
- [64705]="al",
- [64706]="al",
- [64707]="al",
- [64708]="al",
- [64709]="al",
- [64710]="al",
- [64711]="al",
- [64712]="al",
- [64713]="al",
- [64714]="al",
- [64715]="al",
- [64716]="al",
- [64717]="al",
- [64718]="al",
- [64719]="al",
- [64720]="al",
- [64721]="al",
- [64722]="al",
- [64723]="al",
- [64724]="al",
- [64725]="al",
- [64726]="al",
- [64727]="al",
- [64728]="al",
- [64729]="al",
- [64730]="al",
- [64731]="al",
- [64732]="al",
- [64733]="al",
- [64734]="al",
- [64735]="al",
- [64736]="al",
- [64737]="al",
- [64738]="al",
- [64739]="al",
- [64740]="al",
- [64741]="al",
- [64742]="al",
- [64743]="al",
- [64744]="al",
- [64745]="al",
- [64746]="al",
- [64747]="al",
- [64748]="al",
- [64749]="al",
- [64750]="al",
- [64751]="al",
- [64752]="al",
- [64753]="al",
- [64754]="al",
- [64755]="al",
- [64756]="al",
- [64757]="al",
- [64758]="al",
- [64759]="al",
- [64760]="al",
- [64761]="al",
- [64762]="al",
- [64763]="al",
- [64764]="al",
- [64765]="al",
- [64766]="al",
- [64767]="al",
- [64768]="al",
- [64769]="al",
- [64770]="al",
- [64771]="al",
- [64772]="al",
- [64773]="al",
- [64774]="al",
- [64775]="al",
- [64776]="al",
- [64777]="al",
- [64778]="al",
- [64779]="al",
- [64780]="al",
- [64781]="al",
- [64782]="al",
- [64783]="al",
- [64784]="al",
- [64785]="al",
- [64786]="al",
- [64787]="al",
- [64788]="al",
- [64789]="al",
- [64790]="al",
- [64791]="al",
- [64792]="al",
- [64793]="al",
- [64794]="al",
- [64795]="al",
- [64796]="al",
- [64797]="al",
- [64798]="al",
- [64799]="al",
- [64800]="al",
- [64801]="al",
- [64802]="al",
- [64803]="al",
- [64804]="al",
- [64805]="al",
- [64806]="al",
- [64807]="al",
- [64808]="al",
- [64809]="al",
- [64810]="al",
- [64811]="al",
- [64812]="al",
- [64813]="al",
- [64814]="al",
- [64815]="al",
- [64816]="al",
- [64817]="al",
- [64818]="al",
- [64819]="al",
- [64820]="al",
- [64821]="al",
- [64822]="al",
- [64823]="al",
- [64824]="al",
- [64825]="al",
- [64826]="al",
- [64827]="al",
- [64828]="al",
- [64829]="al",
- [64830]="on",
- [64831]="on",
- [64848]="al",
- [64849]="al",
- [64850]="al",
- [64851]="al",
- [64852]="al",
- [64853]="al",
- [64854]="al",
- [64855]="al",
- [64856]="al",
- [64857]="al",
- [64858]="al",
- [64859]="al",
- [64860]="al",
- [64861]="al",
- [64862]="al",
- [64863]="al",
- [64864]="al",
- [64865]="al",
- [64866]="al",
- [64867]="al",
- [64868]="al",
- [64869]="al",
- [64870]="al",
- [64871]="al",
- [64872]="al",
- [64873]="al",
- [64874]="al",
- [64875]="al",
- [64876]="al",
- [64877]="al",
- [64878]="al",
- [64879]="al",
- [64880]="al",
- [64881]="al",
- [64882]="al",
- [64883]="al",
- [64884]="al",
- [64885]="al",
- [64886]="al",
- [64887]="al",
- [64888]="al",
- [64889]="al",
- [64890]="al",
- [64891]="al",
- [64892]="al",
- [64893]="al",
- [64894]="al",
- [64895]="al",
- [64896]="al",
- [64897]="al",
- [64898]="al",
- [64899]="al",
- [64900]="al",
- [64901]="al",
- [64902]="al",
- [64903]="al",
- [64904]="al",
- [64905]="al",
- [64906]="al",
- [64907]="al",
- [64908]="al",
- [64909]="al",
- [64910]="al",
- [64911]="al",
- [64914]="al",
- [64915]="al",
- [64916]="al",
- [64917]="al",
- [64918]="al",
- [64919]="al",
- [64920]="al",
- [64921]="al",
- [64922]="al",
- [64923]="al",
- [64924]="al",
- [64925]="al",
- [64926]="al",
- [64927]="al",
- [64928]="al",
- [64929]="al",
- [64930]="al",
- [64931]="al",
- [64932]="al",
- [64933]="al",
- [64934]="al",
- [64935]="al",
- [64936]="al",
- [64937]="al",
- [64938]="al",
- [64939]="al",
- [64940]="al",
- [64941]="al",
- [64942]="al",
- [64943]="al",
- [64944]="al",
- [64945]="al",
- [64946]="al",
- [64947]="al",
- [64948]="al",
- [64949]="al",
- [64950]="al",
- [64951]="al",
- [64952]="al",
- [64953]="al",
- [64954]="al",
- [64955]="al",
- [64956]="al",
- [64957]="al",
- [64958]="al",
- [64959]="al",
- [64960]="al",
- [64961]="al",
- [64962]="al",
- [64963]="al",
- [64964]="al",
- [64965]="al",
- [64966]="al",
- [64967]="al",
- [65008]="al",
- [65009]="al",
- [65010]="al",
- [65011]="al",
- [65012]="al",
- [65013]="al",
- [65014]="al",
- [65015]="al",
- [65016]="al",
- [65017]="al",
- [65018]="al",
- [65019]="al",
- [65020]="al",
- [65021]="on",
- [65040]="on",
- [65041]="on",
- [65042]="on",
- [65043]="on",
- [65044]="on",
- [65045]="on",
- [65046]="on",
- [65047]="on",
- [65048]="on",
- [65049]="on",
- [65056]="nsm",
- [65057]="nsm",
- [65058]="nsm",
- [65059]="nsm",
- [65060]="nsm",
- [65061]="nsm",
- [65062]="nsm",
- [65063]="nsm",
- [65064]="nsm",
- [65065]="nsm",
- [65066]="nsm",
- [65067]="nsm",
- [65068]="nsm",
- [65069]="nsm",
- [65070]="nsm",
- [65071]="nsm",
- [65072]="on",
- [65073]="on",
- [65074]="on",
- [65075]="on",
- [65076]="on",
- [65077]="on",
- [65078]="on",
- [65079]="on",
- [65080]="on",
- [65081]="on",
- [65082]="on",
- [65083]="on",
- [65084]="on",
- [65085]="on",
- [65086]="on",
- [65087]="on",
- [65088]="on",
- [65089]="on",
- [65090]="on",
- [65091]="on",
- [65092]="on",
- [65093]="on",
- [65094]="on",
- [65095]="on",
- [65096]="on",
- [65097]="on",
- [65098]="on",
- [65099]="on",
- [65100]="on",
- [65101]="on",
- [65102]="on",
- [65103]="on",
- [65104]="cs",
- [65105]="on",
- [65106]="cs",
- [65108]="on",
- [65109]="cs",
- [65110]="on",
- [65111]="on",
- [65112]="on",
- [65113]="on",
- [65114]="on",
- [65115]="on",
- [65116]="on",
- [65117]="on",
- [65118]="on",
- [65119]="et",
- [65120]="on",
- [65121]="on",
- [65122]="es",
- [65123]="es",
- [65124]="on",
- [65125]="on",
- [65126]="on",
- [65128]="on",
- [65129]="et",
- [65130]="et",
- [65131]="on",
- [65136]="al",
- [65137]="al",
- [65138]="al",
- [65139]="al",
- [65140]="al",
- [65142]="al",
- [65143]="al",
- [65144]="al",
- [65145]="al",
- [65146]="al",
- [65147]="al",
- [65148]="al",
- [65149]="al",
- [65150]="al",
- [65151]="al",
- [65152]="al",
- [65153]="al",
- [65154]="al",
- [65155]="al",
- [65156]="al",
- [65157]="al",
- [65158]="al",
- [65159]="al",
- [65160]="al",
- [65161]="al",
- [65162]="al",
- [65163]="al",
- [65164]="al",
- [65165]="al",
- [65166]="al",
- [65167]="al",
- [65168]="al",
- [65169]="al",
- [65170]="al",
- [65171]="al",
- [65172]="al",
- [65173]="al",
- [65174]="al",
- [65175]="al",
- [65176]="al",
- [65177]="al",
- [65178]="al",
- [65179]="al",
- [65180]="al",
- [65181]="al",
- [65182]="al",
- [65183]="al",
- [65184]="al",
- [65185]="al",
- [65186]="al",
- [65187]="al",
- [65188]="al",
- [65189]="al",
- [65190]="al",
- [65191]="al",
- [65192]="al",
- [65193]="al",
- [65194]="al",
- [65195]="al",
- [65196]="al",
- [65197]="al",
- [65198]="al",
- [65199]="al",
- [65200]="al",
- [65201]="al",
- [65202]="al",
- [65203]="al",
- [65204]="al",
- [65205]="al",
- [65206]="al",
- [65207]="al",
- [65208]="al",
- [65209]="al",
- [65210]="al",
- [65211]="al",
- [65212]="al",
- [65213]="al",
- [65214]="al",
- [65215]="al",
- [65216]="al",
- [65217]="al",
- [65218]="al",
- [65219]="al",
- [65220]="al",
- [65221]="al",
- [65222]="al",
- [65223]="al",
- [65224]="al",
- [65225]="al",
- [65226]="al",
- [65227]="al",
- [65228]="al",
- [65229]="al",
- [65230]="al",
- [65231]="al",
- [65232]="al",
- [65233]="al",
- [65234]="al",
- [65235]="al",
- [65236]="al",
- [65237]="al",
- [65238]="al",
- [65239]="al",
- [65240]="al",
- [65241]="al",
- [65242]="al",
- [65243]="al",
- [65244]="al",
- [65245]="al",
- [65246]="al",
- [65247]="al",
- [65248]="al",
- [65249]="al",
- [65250]="al",
- [65251]="al",
- [65252]="al",
- [65253]="al",
- [65254]="al",
- [65255]="al",
- [65256]="al",
- [65257]="al",
- [65258]="al",
- [65259]="al",
- [65260]="al",
- [65261]="al",
- [65262]="al",
- [65263]="al",
- [65264]="al",
- [65265]="al",
- [65266]="al",
- [65267]="al",
- [65268]="al",
- [65269]="al",
- [65270]="al",
- [65271]="al",
- [65272]="al",
- [65273]="al",
- [65274]="al",
- [65275]="al",
- [65276]="al",
- [65279]="bn",
- [65281]="on",
- [65282]="on",
- [65283]="et",
- [65284]="et",
- [65285]="et",
- [65286]="on",
- [65287]="on",
- [65288]="on",
- [65289]="on",
- [65290]="on",
- [65291]="es",
- [65292]="cs",
- [65293]="es",
- [65294]="cs",
- [65295]="cs",
- [65296]="en",
- [65297]="en",
- [65298]="en",
- [65299]="en",
- [65300]="en",
- [65301]="en",
- [65302]="en",
- [65303]="en",
- [65304]="en",
- [65305]="en",
- [65306]="cs",
- [65307]="on",
- [65308]="on",
- [65309]="on",
- [65310]="on",
- [65311]="on",
- [65312]="on",
- [65339]="on",
- [65340]="on",
- [65341]="on",
- [65342]="on",
- [65343]="on",
- [65344]="on",
- [65371]="on",
- [65372]="on",
- [65373]="on",
- [65374]="on",
- [65375]="on",
- [65376]="on",
- [65377]="on",
- [65378]="on",
- [65379]="on",
- [65380]="on",
- [65381]="on",
- [65504]="et",
- [65505]="et",
- [65506]="on",
- [65507]="on",
- [65508]="on",
- [65509]="et",
- [65510]="et",
- [65512]="on",
- [65513]="on",
- [65514]="on",
- [65515]="on",
- [65516]="on",
- [65517]="on",
- [65518]="on",
- [65529]="on",
- [65530]="on",
- [65531]="on",
- [65532]="on",
- [65533]="on",
- [65793]="on",
- [65856]="on",
- [65857]="on",
- [65858]="on",
- [65859]="on",
- [65860]="on",
- [65861]="on",
- [65862]="on",
- [65863]="on",
- [65864]="on",
- [65865]="on",
- [65866]="on",
- [65867]="on",
- [65868]="on",
- [65869]="on",
- [65870]="on",
- [65871]="on",
- [65872]="on",
- [65873]="on",
- [65874]="on",
- [65875]="on",
- [65876]="on",
- [65877]="on",
- [65878]="on",
- [65879]="on",
- [65880]="on",
- [65881]="on",
- [65882]="on",
- [65883]="on",
- [65884]="on",
- [65885]="on",
- [65886]="on",
- [65887]="on",
- [65888]="on",
- [65889]="on",
- [65890]="on",
- [65891]="on",
- [65892]="on",
- [65893]="on",
- [65894]="on",
- [65895]="on",
- [65896]="on",
- [65897]="on",
- [65898]="on",
- [65899]="on",
- [65900]="on",
- [65901]="on",
- [65902]="on",
- [65903]="on",
- [65904]="on",
- [65905]="on",
- [65906]="on",
- [65907]="on",
- [65908]="on",
- [65909]="on",
- [65910]="on",
- [65911]="on",
- [65912]="on",
- [65913]="on",
- [65914]="on",
- [65915]="on",
- [65916]="on",
- [65917]="on",
- [65918]="on",
- [65919]="on",
- [65920]="on",
- [65921]="on",
- [65922]="on",
- [65923]="on",
- [65924]="on",
- [65925]="on",
- [65926]="on",
- [65927]="on",
- [65928]="on",
- [65929]="on",
- [65930]="on",
- [65931]="on",
- [65932]="on",
- [65936]="on",
- [65937]="on",
- [65938]="on",
- [65939]="on",
- [65940]="on",
- [65941]="on",
- [65942]="on",
- [65943]="on",
- [65944]="on",
- [65945]="on",
- [65946]="on",
- [65947]="on",
- [65952]="on",
- [66045]="nsm",
- [66272]="nsm",
- [66273]="en",
- [66274]="en",
- [66275]="en",
- [66276]="en",
- [66277]="en",
- [66278]="en",
- [66279]="en",
- [66280]="en",
- [66281]="en",
- [66282]="en",
- [66283]="en",
- [66284]="en",
- [66285]="en",
- [66286]="en",
- [66287]="en",
- [66288]="en",
- [66289]="en",
- [66290]="en",
- [66291]="en",
- [66292]="en",
- [66293]="en",
- [66294]="en",
- [66295]="en",
- [66296]="en",
- [66297]="en",
- [66298]="en",
- [66299]="en",
- [66422]="nsm",
- [66423]="nsm",
- [66424]="nsm",
- [66425]="nsm",
- [66426]="nsm",
- [67584]="r",
- [67585]="r",
- [67586]="r",
- [67587]="r",
- [67588]="r",
- [67589]="r",
- [67592]="r",
- [67594]="r",
- [67595]="r",
- [67596]="r",
- [67597]="r",
- [67598]="r",
- [67599]="r",
- [67600]="r",
- [67601]="r",
- [67602]="r",
- [67603]="r",
- [67604]="r",
- [67605]="r",
- [67606]="r",
- [67607]="r",
- [67608]="r",
- [67609]="r",
- [67610]="r",
- [67611]="r",
- [67612]="r",
- [67613]="r",
- [67614]="r",
- [67615]="r",
- [67616]="r",
- [67617]="r",
- [67618]="r",
- [67619]="r",
- [67620]="r",
- [67621]="r",
- [67622]="r",
- [67623]="r",
- [67624]="r",
- [67625]="r",
- [67626]="r",
- [67627]="r",
- [67628]="r",
- [67629]="r",
- [67630]="r",
- [67631]="r",
- [67632]="r",
- [67633]="r",
- [67634]="r",
- [67635]="r",
- [67636]="r",
- [67637]="r",
- [67639]="r",
- [67640]="r",
- [67644]="r",
- [67647]="r",
- [67648]="r",
- [67649]="r",
- [67650]="r",
- [67651]="r",
- [67652]="r",
- [67653]="r",
- [67654]="r",
- [67655]="r",
- [67656]="r",
- [67657]="r",
- [67658]="r",
- [67659]="r",
- [67660]="r",
- [67661]="r",
- [67662]="r",
- [67663]="r",
- [67664]="r",
- [67665]="r",
- [67666]="r",
- [67667]="r",
- [67668]="r",
- [67669]="r",
- [67671]="r",
- [67672]="r",
- [67673]="r",
- [67674]="r",
- [67675]="r",
- [67676]="r",
- [67677]="r",
- [67678]="r",
- [67679]="r",
- [67680]="r",
- [67681]="r",
- [67682]="r",
- [67683]="r",
- [67684]="r",
- [67685]="r",
- [67686]="r",
- [67687]="r",
- [67688]="r",
- [67689]="r",
- [67690]="r",
- [67691]="r",
- [67692]="r",
- [67693]="r",
- [67694]="r",
- [67695]="r",
- [67696]="r",
- [67697]="r",
- [67698]="r",
- [67699]="r",
- [67700]="r",
- [67701]="r",
- [67702]="r",
- [67703]="r",
- [67704]="r",
- [67705]="r",
- [67706]="r",
- [67707]="r",
- [67708]="r",
- [67709]="r",
- [67710]="r",
- [67711]="r",
- [67712]="r",
- [67713]="r",
- [67714]="r",
- [67715]="r",
- [67716]="r",
- [67717]="r",
- [67718]="r",
- [67719]="r",
- [67720]="r",
- [67721]="r",
- [67722]="r",
- [67723]="r",
- [67724]="r",
- [67725]="r",
- [67726]="r",
- [67727]="r",
- [67728]="r",
- [67729]="r",
- [67730]="r",
- [67731]="r",
- [67732]="r",
- [67733]="r",
- [67734]="r",
- [67735]="r",
- [67736]="r",
- [67737]="r",
- [67738]="r",
- [67739]="r",
- [67740]="r",
- [67741]="r",
- [67742]="r",
- [67751]="r",
- [67752]="r",
- [67753]="r",
- [67754]="r",
- [67755]="r",
- [67756]="r",
- [67757]="r",
- [67758]="r",
- [67759]="r",
- [67808]="r",
- [67809]="r",
- [67810]="r",
- [67811]="r",
- [67812]="r",
- [67813]="r",
- [67814]="r",
- [67815]="r",
- [67816]="r",
- [67817]="r",
- [67818]="r",
- [67819]="r",
- [67820]="r",
- [67821]="r",
- [67822]="r",
- [67823]="r",
- [67824]="r",
- [67825]="r",
- [67826]="r",
- [67828]="r",
- [67829]="r",
- [67835]="r",
- [67836]="r",
- [67837]="r",
- [67838]="r",
- [67839]="r",
- [67840]="r",
- [67841]="r",
- [67842]="r",
- [67843]="r",
- [67844]="r",
- [67845]="r",
- [67846]="r",
- [67847]="r",
- [67848]="r",
- [67849]="r",
- [67850]="r",
- [67851]="r",
- [67852]="r",
- [67853]="r",
- [67854]="r",
- [67855]="r",
- [67856]="r",
- [67857]="r",
- [67858]="r",
- [67859]="r",
- [67860]="r",
- [67861]="r",
- [67862]="r",
- [67863]="r",
- [67864]="r",
- [67865]="r",
- [67866]="r",
- [67867]="r",
- [67871]="on",
- [67872]="r",
- [67873]="r",
- [67874]="r",
- [67875]="r",
- [67876]="r",
- [67877]="r",
- [67878]="r",
- [67879]="r",
- [67880]="r",
- [67881]="r",
- [67882]="r",
- [67883]="r",
- [67884]="r",
- [67885]="r",
- [67886]="r",
- [67887]="r",
- [67888]="r",
- [67889]="r",
- [67890]="r",
- [67891]="r",
- [67892]="r",
- [67893]="r",
- [67894]="r",
- [67895]="r",
- [67896]="r",
- [67897]="r",
- [67903]="r",
- [67968]="r",
- [67969]="r",
- [67970]="r",
- [67971]="r",
- [67972]="r",
- [67973]="r",
- [67974]="r",
- [67975]="r",
- [67976]="r",
- [67977]="r",
- [67978]="r",
- [67979]="r",
- [67980]="r",
- [67981]="r",
- [67982]="r",
- [67983]="r",
- [67984]="r",
- [67985]="r",
- [67986]="r",
- [67987]="r",
- [67988]="r",
- [67989]="r",
- [67990]="r",
- [67991]="r",
- [67992]="r",
- [67993]="r",
- [67994]="r",
- [67995]="r",
- [67996]="r",
- [67997]="r",
- [67998]="r",
- [67999]="r",
- [68000]="r",
- [68001]="r",
- [68002]="r",
- [68003]="r",
- [68004]="r",
- [68005]="r",
- [68006]="r",
- [68007]="r",
- [68008]="r",
- [68009]="r",
- [68010]="r",
- [68011]="r",
- [68012]="r",
- [68013]="r",
- [68014]="r",
- [68015]="r",
- [68016]="r",
- [68017]="r",
- [68018]="r",
- [68019]="r",
- [68020]="r",
- [68021]="r",
- [68022]="r",
- [68023]="r",
- [68028]="r",
- [68029]="r",
- [68030]="r",
- [68031]="r",
- [68032]="r",
- [68033]="r",
- [68034]="r",
- [68035]="r",
- [68036]="r",
- [68037]="r",
- [68038]="r",
- [68039]="r",
- [68040]="r",
- [68041]="r",
- [68042]="r",
- [68043]="r",
- [68044]="r",
- [68045]="r",
- [68046]="r",
- [68047]="r",
- [68050]="r",
- [68051]="r",
- [68052]="r",
- [68053]="r",
- [68054]="r",
- [68055]="r",
- [68056]="r",
- [68057]="r",
- [68058]="r",
- [68059]="r",
- [68060]="r",
- [68061]="r",
- [68062]="r",
- [68063]="r",
- [68064]="r",
- [68065]="r",
- [68066]="r",
- [68067]="r",
- [68068]="r",
- [68069]="r",
- [68070]="r",
- [68071]="r",
- [68072]="r",
- [68073]="r",
- [68074]="r",
- [68075]="r",
- [68076]="r",
- [68077]="r",
- [68078]="r",
- [68079]="r",
- [68080]="r",
- [68081]="r",
- [68082]="r",
- [68083]="r",
- [68084]="r",
- [68085]="r",
- [68086]="r",
- [68087]="r",
- [68088]="r",
- [68089]="r",
- [68090]="r",
- [68091]="r",
- [68092]="r",
- [68093]="r",
- [68094]="r",
- [68095]="r",
- [68096]="r",
- [68097]="nsm",
- [68098]="nsm",
- [68099]="nsm",
- [68101]="nsm",
- [68102]="nsm",
- [68108]="nsm",
- [68109]="nsm",
- [68110]="nsm",
- [68111]="nsm",
- [68112]="r",
- [68113]="r",
- [68114]="r",
- [68115]="r",
- [68117]="r",
- [68118]="r",
- [68119]="r",
- [68121]="r",
- [68122]="r",
- [68123]="r",
- [68124]="r",
- [68125]="r",
- [68126]="r",
- [68127]="r",
- [68128]="r",
- [68129]="r",
- [68130]="r",
- [68131]="r",
- [68132]="r",
- [68133]="r",
- [68134]="r",
- [68135]="r",
- [68136]="r",
- [68137]="r",
- [68138]="r",
- [68139]="r",
- [68140]="r",
- [68141]="r",
- [68142]="r",
- [68143]="r",
- [68144]="r",
- [68145]="r",
- [68146]="r",
- [68147]="r",
- [68152]="nsm",
- [68153]="nsm",
- [68154]="nsm",
- [68159]="nsm",
- [68160]="r",
- [68161]="r",
- [68162]="r",
- [68163]="r",
- [68164]="r",
- [68165]="r",
- [68166]="r",
- [68167]="r",
- [68176]="r",
- [68177]="r",
- [68178]="r",
- [68179]="r",
- [68180]="r",
- [68181]="r",
- [68182]="r",
- [68183]="r",
- [68184]="r",
- [68192]="r",
- [68193]="r",
- [68194]="r",
- [68195]="r",
- [68196]="r",
- [68197]="r",
- [68198]="r",
- [68199]="r",
- [68200]="r",
- [68201]="r",
- [68202]="r",
- [68203]="r",
- [68204]="r",
- [68205]="r",
- [68206]="r",
- [68207]="r",
- [68208]="r",
- [68209]="r",
- [68210]="r",
- [68211]="r",
- [68212]="r",
- [68213]="r",
- [68214]="r",
- [68215]="r",
- [68216]="r",
- [68217]="r",
- [68218]="r",
- [68219]="r",
- [68220]="r",
- [68221]="r",
- [68222]="r",
- [68223]="r",
- [68224]="r",
- [68225]="r",
- [68226]="r",
- [68227]="r",
- [68228]="r",
- [68229]="r",
- [68230]="r",
- [68231]="r",
- [68232]="r",
- [68233]="r",
- [68234]="r",
- [68235]="r",
- [68236]="r",
- [68237]="r",
- [68238]="r",
- [68239]="r",
- [68240]="r",
- [68241]="r",
- [68242]="r",
- [68243]="r",
- [68244]="r",
- [68245]="r",
- [68246]="r",
- [68247]="r",
- [68248]="r",
- [68249]="r",
- [68250]="r",
- [68251]="r",
- [68252]="r",
- [68253]="r",
- [68254]="r",
- [68255]="r",
- [68288]="r",
- [68289]="r",
- [68290]="r",
- [68291]="r",
- [68292]="r",
- [68293]="r",
- [68294]="r",
- [68295]="r",
- [68296]="r",
- [68297]="r",
- [68298]="r",
- [68299]="r",
- [68300]="r",
- [68301]="r",
- [68302]="r",
- [68303]="r",
- [68304]="r",
- [68305]="r",
- [68306]="r",
- [68307]="r",
- [68308]="r",
- [68309]="r",
- [68310]="r",
- [68311]="r",
- [68312]="r",
- [68313]="r",
- [68314]="r",
- [68315]="r",
- [68316]="r",
- [68317]="r",
- [68318]="r",
- [68319]="r",
- [68320]="r",
- [68321]="r",
- [68322]="r",
- [68323]="r",
- [68324]="r",
- [68325]="nsm",
- [68326]="nsm",
- [68331]="r",
- [68332]="r",
- [68333]="r",
- [68334]="r",
- [68335]="r",
- [68336]="r",
- [68337]="r",
- [68338]="r",
- [68339]="r",
- [68340]="r",
- [68341]="r",
- [68342]="r",
- [68352]="r",
- [68353]="r",
- [68354]="r",
- [68355]="r",
- [68356]="r",
- [68357]="r",
- [68358]="r",
- [68359]="r",
- [68360]="r",
- [68361]="r",
- [68362]="r",
- [68363]="r",
- [68364]="r",
- [68365]="r",
- [68366]="r",
- [68367]="r",
- [68368]="r",
- [68369]="r",
- [68370]="r",
- [68371]="r",
- [68372]="r",
- [68373]="r",
- [68374]="r",
- [68375]="r",
- [68376]="r",
- [68377]="r",
- [68378]="r",
- [68379]="r",
- [68380]="r",
- [68381]="r",
- [68382]="r",
- [68383]="r",
- [68384]="r",
- [68385]="r",
- [68386]="r",
- [68387]="r",
- [68388]="r",
- [68389]="r",
- [68390]="r",
- [68391]="r",
- [68392]="r",
- [68393]="r",
- [68394]="r",
- [68395]="r",
- [68396]="r",
- [68397]="r",
- [68398]="r",
- [68399]="r",
- [68400]="r",
- [68401]="r",
- [68402]="r",
- [68403]="r",
- [68404]="r",
- [68405]="r",
- [68409]="on",
- [68410]="on",
- [68411]="on",
- [68412]="on",
- [68413]="on",
- [68414]="on",
- [68415]="on",
- [68416]="r",
- [68417]="r",
- [68418]="r",
- [68419]="r",
- [68420]="r",
- [68421]="r",
- [68422]="r",
- [68423]="r",
- [68424]="r",
- [68425]="r",
- [68426]="r",
- [68427]="r",
- [68428]="r",
- [68429]="r",
- [68430]="r",
- [68431]="r",
- [68432]="r",
- [68433]="r",
- [68434]="r",
- [68435]="r",
- [68436]="r",
- [68437]="r",
- [68440]="r",
- [68441]="r",
- [68442]="r",
- [68443]="r",
- [68444]="r",
- [68445]="r",
- [68446]="r",
- [68447]="r",
- [68448]="r",
- [68449]="r",
- [68450]="r",
- [68451]="r",
- [68452]="r",
- [68453]="r",
- [68454]="r",
- [68455]="r",
- [68456]="r",
- [68457]="r",
- [68458]="r",
- [68459]="r",
- [68460]="r",
- [68461]="r",
- [68462]="r",
- [68463]="r",
- [68464]="r",
- [68465]="r",
- [68466]="r",
- [68472]="r",
- [68473]="r",
- [68474]="r",
- [68475]="r",
- [68476]="r",
- [68477]="r",
- [68478]="r",
- [68479]="r",
- [68480]="r",
- [68481]="r",
- [68482]="r",
- [68483]="r",
- [68484]="r",
- [68485]="r",
- [68486]="r",
- [68487]="r",
- [68488]="r",
- [68489]="r",
- [68490]="r",
- [68491]="r",
- [68492]="r",
- [68493]="r",
- [68494]="r",
- [68495]="r",
- [68496]="r",
- [68497]="r",
- [68505]="r",
- [68506]="r",
- [68507]="r",
- [68508]="r",
- [68521]="r",
- [68522]="r",
- [68523]="r",
- [68524]="r",
- [68525]="r",
- [68526]="r",
- [68527]="r",
- [68608]="r",
- [68609]="r",
- [68610]="r",
- [68611]="r",
- [68612]="r",
- [68613]="r",
- [68614]="r",
- [68615]="r",
- [68616]="r",
- [68617]="r",
- [68618]="r",
- [68619]="r",
- [68620]="r",
- [68621]="r",
- [68622]="r",
- [68623]="r",
- [68624]="r",
- [68625]="r",
- [68626]="r",
- [68627]="r",
- [68628]="r",
- [68629]="r",
- [68630]="r",
- [68631]="r",
- [68632]="r",
- [68633]="r",
- [68634]="r",
- [68635]="r",
- [68636]="r",
- [68637]="r",
- [68638]="r",
- [68639]="r",
- [68640]="r",
- [68641]="r",
- [68642]="r",
- [68643]="r",
- [68644]="r",
- [68645]="r",
- [68646]="r",
- [68647]="r",
- [68648]="r",
- [68649]="r",
- [68650]="r",
- [68651]="r",
- [68652]="r",
- [68653]="r",
- [68654]="r",
- [68655]="r",
- [68656]="r",
- [68657]="r",
- [68658]="r",
- [68659]="r",
- [68660]="r",
- [68661]="r",
- [68662]="r",
- [68663]="r",
- [68664]="r",
- [68665]="r",
- [68666]="r",
- [68667]="r",
- [68668]="r",
- [68669]="r",
- [68670]="r",
- [68671]="r",
- [68672]="r",
- [68673]="r",
- [68674]="r",
- [68675]="r",
- [68676]="r",
- [68677]="r",
- [68678]="r",
- [68679]="r",
- [68680]="r",
- [68736]="r",
- [68737]="r",
- [68738]="r",
- [68739]="r",
- [68740]="r",
- [68741]="r",
- [68742]="r",
- [68743]="r",
- [68744]="r",
- [68745]="r",
- [68746]="r",
- [68747]="r",
- [68748]="r",
- [68749]="r",
- [68750]="r",
- [68751]="r",
- [68752]="r",
- [68753]="r",
- [68754]="r",
- [68755]="r",
- [68756]="r",
- [68757]="r",
- [68758]="r",
- [68759]="r",
- [68760]="r",
- [68761]="r",
- [68762]="r",
- [68763]="r",
- [68764]="r",
- [68765]="r",
- [68766]="r",
- [68767]="r",
- [68768]="r",
- [68769]="r",
- [68770]="r",
- [68771]="r",
- [68772]="r",
- [68773]="r",
- [68774]="r",
- [68775]="r",
- [68776]="r",
- [68777]="r",
- [68778]="r",
- [68779]="r",
- [68780]="r",
- [68781]="r",
- [68782]="r",
- [68783]="r",
- [68784]="r",
- [68785]="r",
- [68786]="r",
- [68800]="r",
- [68801]="r",
- [68802]="r",
- [68803]="r",
- [68804]="r",
- [68805]="r",
- [68806]="r",
- [68807]="r",
- [68808]="r",
- [68809]="r",
- [68810]="r",
- [68811]="r",
- [68812]="r",
- [68813]="r",
- [68814]="r",
- [68815]="r",
- [68816]="r",
- [68817]="r",
- [68818]="r",
- [68819]="r",
- [68820]="r",
- [68821]="r",
- [68822]="r",
- [68823]="r",
- [68824]="r",
- [68825]="r",
- [68826]="r",
- [68827]="r",
- [68828]="r",
- [68829]="r",
- [68830]="r",
- [68831]="r",
- [68832]="r",
- [68833]="r",
- [68834]="r",
- [68835]="r",
- [68836]="r",
- [68837]="r",
- [68838]="r",
- [68839]="r",
- [68840]="r",
- [68841]="r",
- [68842]="r",
- [68843]="r",
- [68844]="r",
- [68845]="r",
- [68846]="r",
- [68847]="r",
- [68848]="r",
- [68849]="r",
- [68850]="r",
- [68858]="r",
- [68859]="r",
- [68860]="r",
- [68861]="r",
- [68862]="r",
- [68863]="r",
- [69216]="an",
- [69217]="an",
- [69218]="an",
- [69219]="an",
- [69220]="an",
- [69221]="an",
- [69222]="an",
- [69223]="an",
- [69224]="an",
- [69225]="an",
- [69226]="an",
- [69227]="an",
- [69228]="an",
- [69229]="an",
- [69230]="an",
- [69231]="an",
- [69232]="an",
- [69233]="an",
- [69234]="an",
- [69235]="an",
- [69236]="an",
- [69237]="an",
- [69238]="an",
- [69239]="an",
- [69240]="an",
- [69241]="an",
- [69242]="an",
- [69243]="an",
- [69244]="an",
- [69245]="an",
- [69246]="an",
- [69633]="nsm",
- [69688]="nsm",
- [69689]="nsm",
- [69690]="nsm",
- [69691]="nsm",
- [69692]="nsm",
- [69693]="nsm",
- [69694]="nsm",
- [69695]="nsm",
- [69696]="nsm",
- [69697]="nsm",
- [69698]="nsm",
- [69699]="nsm",
- [69700]="nsm",
- [69701]="nsm",
- [69702]="nsm",
- [69714]="on",
- [69715]="on",
- [69716]="on",
- [69717]="on",
- [69718]="on",
- [69719]="on",
- [69720]="on",
- [69721]="on",
- [69722]="on",
- [69723]="on",
- [69724]="on",
- [69725]="on",
- [69726]="on",
- [69727]="on",
- [69728]="on",
- [69729]="on",
- [69730]="on",
- [69731]="on",
- [69732]="on",
- [69733]="on",
- [69759]="nsm",
- [69760]="nsm",
- [69761]="nsm",
- [69811]="nsm",
- [69812]="nsm",
- [69813]="nsm",
- [69814]="nsm",
- [69817]="nsm",
- [69818]="nsm",
- [69888]="nsm",
- [69889]="nsm",
- [69890]="nsm",
- [69927]="nsm",
- [69928]="nsm",
- [69929]="nsm",
- [69930]="nsm",
- [69931]="nsm",
- [69933]="nsm",
- [69934]="nsm",
- [69935]="nsm",
- [69936]="nsm",
- [69937]="nsm",
- [69938]="nsm",
- [69939]="nsm",
- [69940]="nsm",
- [70003]="nsm",
- [70016]="nsm",
- [70017]="nsm",
- [70070]="nsm",
- [70071]="nsm",
- [70072]="nsm",
- [70073]="nsm",
- [70074]="nsm",
- [70075]="nsm",
- [70076]="nsm",
- [70077]="nsm",
- [70078]="nsm",
- [70090]="nsm",
- [70091]="nsm",
- [70092]="nsm",
- [70191]="nsm",
- [70192]="nsm",
- [70193]="nsm",
- [70196]="nsm",
- [70198]="nsm",
- [70199]="nsm",
- [70206]="nsm",
- [70367]="nsm",
- [70371]="nsm",
- [70372]="nsm",
- [70373]="nsm",
- [70374]="nsm",
- [70375]="nsm",
- [70376]="nsm",
- [70377]="nsm",
- [70378]="nsm",
- [70400]="nsm",
- [70401]="nsm",
- [70460]="nsm",
- [70464]="nsm",
- [70502]="nsm",
- [70503]="nsm",
- [70504]="nsm",
- [70505]="nsm",
- [70506]="nsm",
- [70507]="nsm",
- [70508]="nsm",
- [70512]="nsm",
- [70513]="nsm",
- [70514]="nsm",
- [70515]="nsm",
- [70516]="nsm",
- [70712]="nsm",
- [70713]="nsm",
- [70714]="nsm",
- [70715]="nsm",
- [70716]="nsm",
- [70717]="nsm",
- [70718]="nsm",
- [70719]="nsm",
- [70722]="nsm",
- [70723]="nsm",
- [70724]="nsm",
- [70726]="nsm",
- [70835]="nsm",
- [70836]="nsm",
- [70837]="nsm",
- [70838]="nsm",
- [70839]="nsm",
- [70840]="nsm",
- [70842]="nsm",
- [70847]="nsm",
- [70848]="nsm",
- [70850]="nsm",
- [70851]="nsm",
- [71090]="nsm",
- [71091]="nsm",
- [71092]="nsm",
- [71093]="nsm",
- [71100]="nsm",
- [71101]="nsm",
- [71103]="nsm",
- [71104]="nsm",
- [71132]="nsm",
- [71133]="nsm",
- [71219]="nsm",
- [71220]="nsm",
- [71221]="nsm",
- [71222]="nsm",
- [71223]="nsm",
- [71224]="nsm",
- [71225]="nsm",
- [71226]="nsm",
- [71229]="nsm",
- [71231]="nsm",
- [71232]="nsm",
- [71264]="on",
- [71265]="on",
- [71266]="on",
- [71267]="on",
- [71268]="on",
- [71269]="on",
- [71270]="on",
- [71271]="on",
- [71272]="on",
- [71273]="on",
- [71274]="on",
- [71275]="on",
- [71276]="on",
- [71339]="nsm",
- [71341]="nsm",
- [71344]="nsm",
- [71345]="nsm",
- [71346]="nsm",
- [71347]="nsm",
- [71348]="nsm",
- [71349]="nsm",
- [71351]="nsm",
- [71453]="nsm",
- [71454]="nsm",
- [71455]="nsm",
- [71458]="nsm",
- [71459]="nsm",
- [71460]="nsm",
- [71461]="nsm",
- [71463]="nsm",
- [71464]="nsm",
- [71465]="nsm",
- [71466]="nsm",
- [71467]="nsm",
- [72193]="nsm",
- [72194]="nsm",
- [72195]="nsm",
- [72196]="nsm",
- [72197]="nsm",
- [72198]="nsm",
- [72201]="nsm",
- [72202]="nsm",
- [72243]="nsm",
- [72244]="nsm",
- [72245]="nsm",
- [72246]="nsm",
- [72247]="nsm",
- [72248]="nsm",
- [72251]="nsm",
- [72252]="nsm",
- [72253]="nsm",
- [72254]="nsm",
- [72263]="nsm",
- [72273]="nsm",
- [72274]="nsm",
- [72275]="nsm",
- [72276]="nsm",
- [72277]="nsm",
- [72278]="nsm",
- [72281]="nsm",
- [72282]="nsm",
- [72283]="nsm",
- [72330]="nsm",
- [72331]="nsm",
- [72332]="nsm",
- [72333]="nsm",
- [72334]="nsm",
- [72335]="nsm",
- [72336]="nsm",
- [72337]="nsm",
- [72338]="nsm",
- [72339]="nsm",
- [72340]="nsm",
- [72341]="nsm",
- [72342]="nsm",
- [72344]="nsm",
- [72345]="nsm",
- [72752]="nsm",
- [72753]="nsm",
- [72754]="nsm",
- [72755]="nsm",
- [72756]="nsm",
- [72757]="nsm",
- [72758]="nsm",
- [72760]="nsm",
- [72761]="nsm",
- [72762]="nsm",
- [72763]="nsm",
- [72764]="nsm",
- [72765]="nsm",
- [72850]="nsm",
- [72851]="nsm",
- [72852]="nsm",
- [72853]="nsm",
- [72854]="nsm",
- [72855]="nsm",
- [72856]="nsm",
- [72857]="nsm",
- [72858]="nsm",
- [72859]="nsm",
- [72860]="nsm",
- [72861]="nsm",
- [72862]="nsm",
- [72863]="nsm",
- [72864]="nsm",
- [72865]="nsm",
- [72866]="nsm",
- [72867]="nsm",
- [72868]="nsm",
- [72869]="nsm",
- [72870]="nsm",
- [72871]="nsm",
- [72874]="nsm",
- [72875]="nsm",
- [72876]="nsm",
- [72877]="nsm",
- [72878]="nsm",
- [72879]="nsm",
- [72880]="nsm",
- [72882]="nsm",
- [72883]="nsm",
- [72885]="nsm",
- [72886]="nsm",
- [73009]="nsm",
- [73010]="nsm",
- [73011]="nsm",
- [73012]="nsm",
- [73013]="nsm",
- [73014]="nsm",
- [73018]="nsm",
- [73020]="nsm",
- [73021]="nsm",
- [73023]="nsm",
- [73024]="nsm",
- [73025]="nsm",
- [73026]="nsm",
- [73027]="nsm",
- [73028]="nsm",
- [73029]="nsm",
- [73031]="nsm",
- [92912]="nsm",
- [92913]="nsm",
- [92914]="nsm",
- [92915]="nsm",
- [92916]="nsm",
- [92976]="nsm",
- [92977]="nsm",
- [92978]="nsm",
- [92979]="nsm",
- [92980]="nsm",
- [92981]="nsm",
- [92982]="nsm",
- [94095]="nsm",
- [94096]="nsm",
- [94097]="nsm",
- [94098]="nsm",
- [113821]="nsm",
- [113822]="nsm",
- [113824]="bn",
- [113825]="bn",
- [113826]="bn",
- [113827]="bn",
- [119143]="nsm",
- [119144]="nsm",
- [119145]="nsm",
- [119155]="bn",
- [119156]="bn",
- [119157]="bn",
- [119158]="bn",
- [119159]="bn",
- [119160]="bn",
- [119161]="bn",
- [119162]="bn",
- [119163]="nsm",
- [119164]="nsm",
- [119165]="nsm",
- [119166]="nsm",
- [119167]="nsm",
- [119168]="nsm",
- [119169]="nsm",
- [119170]="nsm",
- [119173]="nsm",
- [119174]="nsm",
- [119175]="nsm",
- [119176]="nsm",
- [119177]="nsm",
- [119178]="nsm",
- [119179]="nsm",
- [119210]="nsm",
- [119211]="nsm",
- [119212]="nsm",
- [119213]="nsm",
- [119296]="on",
- [119297]="on",
- [119298]="on",
- [119299]="on",
- [119300]="on",
- [119301]="on",
- [119302]="on",
- [119303]="on",
- [119304]="on",
- [119305]="on",
- [119306]="on",
- [119307]="on",
- [119308]="on",
- [119309]="on",
- [119310]="on",
- [119311]="on",
- [119312]="on",
- [119313]="on",
- [119314]="on",
- [119315]="on",
- [119316]="on",
- [119317]="on",
- [119318]="on",
- [119319]="on",
- [119320]="on",
- [119321]="on",
- [119322]="on",
- [119323]="on",
- [119324]="on",
- [119325]="on",
- [119326]="on",
- [119327]="on",
- [119328]="on",
- [119329]="on",
- [119330]="on",
- [119331]="on",
- [119332]="on",
- [119333]="on",
- [119334]="on",
- [119335]="on",
- [119336]="on",
- [119337]="on",
- [119338]="on",
- [119339]="on",
- [119340]="on",
- [119341]="on",
- [119342]="on",
- [119343]="on",
- [119344]="on",
- [119345]="on",
- [119346]="on",
- [119347]="on",
- [119348]="on",
- [119349]="on",
- [119350]="on",
- [119351]="on",
- [119352]="on",
- [119353]="on",
- [119354]="on",
- [119355]="on",
- [119356]="on",
- [119357]="on",
- [119358]="on",
- [119359]="on",
- [119360]="on",
- [119361]="on",
- [119362]="nsm",
- [119363]="nsm",
- [119364]="nsm",
- [119365]="on",
- [119552]="on",
- [119553]="on",
- [119554]="on",
- [119555]="on",
- [119556]="on",
- [119557]="on",
- [119558]="on",
- [119559]="on",
- [119560]="on",
- [119561]="on",
- [119562]="on",
- [119563]="on",
- [119564]="on",
- [119565]="on",
- [119566]="on",
- [119567]="on",
- [119568]="on",
- [119569]="on",
- [119570]="on",
- [119571]="on",
- [119572]="on",
- [119573]="on",
- [119574]="on",
- [119575]="on",
- [119576]="on",
- [119577]="on",
- [119578]="on",
- [119579]="on",
- [119580]="on",
- [119581]="on",
- [119582]="on",
- [119583]="on",
- [119584]="on",
- [119585]="on",
- [119586]="on",
- [119587]="on",
- [119588]="on",
- [119589]="on",
- [119590]="on",
- [119591]="on",
- [119592]="on",
- [119593]="on",
- [119594]="on",
- [119595]="on",
- [119596]="on",
- [119597]="on",
- [119598]="on",
- [119599]="on",
- [119600]="on",
- [119601]="on",
- [119602]="on",
- [119603]="on",
- [119604]="on",
- [119605]="on",
- [119606]="on",
- [119607]="on",
- [119608]="on",
- [119609]="on",
- [119610]="on",
- [119611]="on",
- [119612]="on",
- [119613]="on",
- [119614]="on",
- [119615]="on",
- [119616]="on",
- [119617]="on",
- [119618]="on",
- [119619]="on",
- [119620]="on",
- [119621]="on",
- [119622]="on",
- [119623]="on",
- [119624]="on",
- [119625]="on",
- [119626]="on",
- [119627]="on",
- [119628]="on",
- [119629]="on",
- [119630]="on",
- [119631]="on",
- [119632]="on",
- [119633]="on",
- [119634]="on",
- [119635]="on",
- [119636]="on",
- [119637]="on",
- [119638]="on",
- [120539]="on",
- [120597]="on",
- [120655]="on",
- [120713]="on",
- [120771]="on",
- [120782]="en",
- [120783]="en",
- [120784]="en",
- [120785]="en",
- [120786]="en",
- [120787]="en",
- [120788]="en",
- [120789]="en",
- [120790]="en",
- [120791]="en",
- [120792]="en",
- [120793]="en",
- [120794]="en",
- [120795]="en",
- [120796]="en",
- [120797]="en",
- [120798]="en",
- [120799]="en",
- [120800]="en",
- [120801]="en",
- [120802]="en",
- [120803]="en",
- [120804]="en",
- [120805]="en",
- [120806]="en",
- [120807]="en",
- [120808]="en",
- [120809]="en",
- [120810]="en",
- [120811]="en",
- [120812]="en",
- [120813]="en",
- [120814]="en",
- [120815]="en",
- [120816]="en",
- [120817]="en",
- [120818]="en",
- [120819]="en",
- [120820]="en",
- [120821]="en",
- [120822]="en",
- [120823]="en",
- [120824]="en",
- [120825]="en",
- [120826]="en",
- [120827]="en",
- [120828]="en",
- [120829]="en",
- [120830]="en",
- [120831]="en",
- [121344]="nsm",
- [121345]="nsm",
- [121346]="nsm",
- [121347]="nsm",
- [121348]="nsm",
- [121349]="nsm",
- [121350]="nsm",
- [121351]="nsm",
- [121352]="nsm",
- [121353]="nsm",
- [121354]="nsm",
- [121355]="nsm",
- [121356]="nsm",
- [121357]="nsm",
- [121358]="nsm",
- [121359]="nsm",
- [121360]="nsm",
- [121361]="nsm",
- [121362]="nsm",
- [121363]="nsm",
- [121364]="nsm",
- [121365]="nsm",
- [121366]="nsm",
- [121367]="nsm",
- [121368]="nsm",
- [121369]="nsm",
- [121370]="nsm",
- [121371]="nsm",
- [121372]="nsm",
- [121373]="nsm",
- [121374]="nsm",
- [121375]="nsm",
- [121376]="nsm",
- [121377]="nsm",
- [121378]="nsm",
- [121379]="nsm",
- [121380]="nsm",
- [121381]="nsm",
- [121382]="nsm",
- [121383]="nsm",
- [121384]="nsm",
- [121385]="nsm",
- [121386]="nsm",
- [121387]="nsm",
- [121388]="nsm",
- [121389]="nsm",
- [121390]="nsm",
- [121391]="nsm",
- [121392]="nsm",
- [121393]="nsm",
- [121394]="nsm",
- [121395]="nsm",
- [121396]="nsm",
- [121397]="nsm",
- [121398]="nsm",
- [121403]="nsm",
- [121404]="nsm",
- [121405]="nsm",
- [121406]="nsm",
- [121407]="nsm",
- [121408]="nsm",
- [121409]="nsm",
- [121410]="nsm",
- [121411]="nsm",
- [121412]="nsm",
- [121413]="nsm",
- [121414]="nsm",
- [121415]="nsm",
- [121416]="nsm",
- [121417]="nsm",
- [121418]="nsm",
- [121419]="nsm",
- [121420]="nsm",
- [121421]="nsm",
- [121422]="nsm",
- [121423]="nsm",
- [121424]="nsm",
- [121425]="nsm",
- [121426]="nsm",
- [121427]="nsm",
- [121428]="nsm",
- [121429]="nsm",
- [121430]="nsm",
- [121431]="nsm",
- [121432]="nsm",
- [121433]="nsm",
- [121434]="nsm",
- [121435]="nsm",
- [121436]="nsm",
- [121437]="nsm",
- [121438]="nsm",
- [121439]="nsm",
- [121440]="nsm",
- [121441]="nsm",
- [121442]="nsm",
- [121443]="nsm",
- [121444]="nsm",
- [121445]="nsm",
- [121446]="nsm",
- [121447]="nsm",
- [121448]="nsm",
- [121449]="nsm",
- [121450]="nsm",
- [121451]="nsm",
- [121452]="nsm",
- [121461]="nsm",
- [121476]="nsm",
- [121499]="nsm",
- [121500]="nsm",
- [121501]="nsm",
- [121502]="nsm",
- [121503]="nsm",
- [121505]="nsm",
- [121506]="nsm",
- [121507]="nsm",
- [121508]="nsm",
- [121509]="nsm",
- [121510]="nsm",
- [121511]="nsm",
- [121512]="nsm",
- [121513]="nsm",
- [121514]="nsm",
- [121515]="nsm",
- [121516]="nsm",
- [121517]="nsm",
- [121518]="nsm",
- [121519]="nsm",
- [122880]="nsm",
- [122881]="nsm",
- [122882]="nsm",
- [122883]="nsm",
- [122884]="nsm",
- [122885]="nsm",
- [122886]="nsm",
- [122888]="nsm",
- [122889]="nsm",
- [122890]="nsm",
- [122891]="nsm",
- [122892]="nsm",
- [122893]="nsm",
- [122894]="nsm",
- [122895]="nsm",
- [122896]="nsm",
- [122897]="nsm",
- [122898]="nsm",
- [122899]="nsm",
- [122900]="nsm",
- [122901]="nsm",
- [122902]="nsm",
- [122903]="nsm",
- [122904]="nsm",
- [122907]="nsm",
- [122908]="nsm",
- [122909]="nsm",
- [122910]="nsm",
- [122911]="nsm",
- [122912]="nsm",
- [122913]="nsm",
- [122915]="nsm",
- [122916]="nsm",
- [122918]="nsm",
- [122919]="nsm",
- [122920]="nsm",
- [122921]="nsm",
- [122922]="nsm",
- [124928]="r",
- [124929]="r",
- [124930]="r",
- [124931]="r",
- [124932]="r",
- [124933]="r",
- [124934]="r",
- [124935]="r",
- [124936]="r",
- [124937]="r",
- [124938]="r",
- [124939]="r",
- [124940]="r",
- [124941]="r",
- [124942]="r",
- [124943]="r",
- [124944]="r",
- [124945]="r",
- [124946]="r",
- [124947]="r",
- [124948]="r",
- [124949]="r",
- [124950]="r",
- [124951]="r",
- [124952]="r",
- [124953]="r",
- [124954]="r",
- [124955]="r",
- [124956]="r",
- [124957]="r",
- [124958]="r",
- [124959]="r",
- [124960]="r",
- [124961]="r",
- [124962]="r",
- [124963]="r",
- [124964]="r",
- [124965]="r",
- [124966]="r",
- [124967]="r",
- [124968]="r",
- [124969]="r",
- [124970]="r",
- [124971]="r",
- [124972]="r",
- [124973]="r",
- [124974]="r",
- [124975]="r",
- [124976]="r",
- [124977]="r",
- [124978]="r",
- [124979]="r",
- [124980]="r",
- [124981]="r",
- [124982]="r",
- [124983]="r",
- [124984]="r",
- [124985]="r",
- [124986]="r",
- [124987]="r",
- [124988]="r",
- [124989]="r",
- [124990]="r",
- [124991]="r",
- [124992]="r",
- [124993]="r",
- [124994]="r",
- [124995]="r",
- [124996]="r",
- [124997]="r",
- [124998]="r",
- [124999]="r",
- [125000]="r",
- [125001]="r",
- [125002]="r",
- [125003]="r",
- [125004]="r",
- [125005]="r",
- [125006]="r",
- [125007]="r",
- [125008]="r",
- [125009]="r",
- [125010]="r",
- [125011]="r",
- [125012]="r",
- [125013]="r",
- [125014]="r",
- [125015]="r",
- [125016]="r",
- [125017]="r",
- [125018]="r",
- [125019]="r",
- [125020]="r",
- [125021]="r",
- [125022]="r",
- [125023]="r",
- [125024]="r",
- [125025]="r",
- [125026]="r",
- [125027]="r",
- [125028]="r",
- [125029]="r",
- [125030]="r",
- [125031]="r",
- [125032]="r",
- [125033]="r",
- [125034]="r",
- [125035]="r",
- [125036]="r",
- [125037]="r",
- [125038]="r",
- [125039]="r",
- [125040]="r",
- [125041]="r",
- [125042]="r",
- [125043]="r",
- [125044]="r",
- [125045]="r",
- [125046]="r",
- [125047]="r",
- [125048]="r",
- [125049]="r",
- [125050]="r",
- [125051]="r",
- [125052]="r",
- [125053]="r",
- [125054]="r",
- [125055]="r",
- [125056]="r",
- [125057]="r",
- [125058]="r",
- [125059]="r",
- [125060]="r",
- [125061]="r",
- [125062]="r",
- [125063]="r",
- [125064]="r",
- [125065]="r",
- [125066]="r",
- [125067]="r",
- [125068]="r",
- [125069]="r",
- [125070]="r",
- [125071]="r",
- [125072]="r",
- [125073]="r",
- [125074]="r",
- [125075]="r",
- [125076]="r",
- [125077]="r",
- [125078]="r",
- [125079]="r",
- [125080]="r",
- [125081]="r",
- [125082]="r",
- [125083]="r",
- [125084]="r",
- [125085]="r",
- [125086]="r",
- [125087]="r",
- [125088]="r",
- [125089]="r",
- [125090]="r",
- [125091]="r",
- [125092]="r",
- [125093]="r",
- [125094]="r",
- [125095]="r",
- [125096]="r",
- [125097]="r",
- [125098]="r",
- [125099]="r",
- [125100]="r",
- [125101]="r",
- [125102]="r",
- [125103]="r",
- [125104]="r",
- [125105]="r",
- [125106]="r",
- [125107]="r",
- [125108]="r",
- [125109]="r",
- [125110]="r",
- [125111]="r",
- [125112]="r",
- [125113]="r",
- [125114]="r",
- [125115]="r",
- [125116]="r",
- [125117]="r",
- [125118]="r",
- [125119]="r",
- [125120]="r",
- [125121]="r",
- [125122]="r",
- [125123]="r",
- [125124]="r",
- [125127]="r",
- [125128]="r",
- [125129]="r",
- [125130]="r",
- [125131]="r",
- [125132]="r",
- [125133]="r",
- [125134]="r",
- [125135]="r",
- [125136]="nsm",
- [125137]="nsm",
- [125138]="nsm",
- [125139]="nsm",
- [125140]="nsm",
- [125141]="nsm",
- [125142]="nsm",
- [125184]="r",
- [125185]="r",
- [125186]="r",
- [125187]="r",
- [125188]="r",
- [125189]="r",
- [125190]="r",
- [125191]="r",
- [125192]="r",
- [125193]="r",
- [125194]="r",
- [125195]="r",
- [125196]="r",
- [125197]="r",
- [125198]="r",
- [125199]="r",
- [125200]="r",
- [125201]="r",
- [125202]="r",
- [125203]="r",
- [125204]="r",
- [125205]="r",
- [125206]="r",
- [125207]="r",
- [125208]="r",
- [125209]="r",
- [125210]="r",
- [125211]="r",
- [125212]="r",
- [125213]="r",
- [125214]="r",
- [125215]="r",
- [125216]="r",
- [125217]="r",
- [125218]="r",
- [125219]="r",
- [125220]="r",
- [125221]="r",
- [125222]="r",
- [125223]="r",
- [125224]="r",
- [125225]="r",
- [125226]="r",
- [125227]="r",
- [125228]="r",
- [125229]="r",
- [125230]="r",
- [125231]="r",
- [125232]="r",
- [125233]="r",
- [125234]="r",
- [125235]="r",
- [125236]="r",
- [125237]="r",
- [125238]="r",
- [125239]="r",
- [125240]="r",
- [125241]="r",
- [125242]="r",
- [125243]="r",
- [125244]="r",
- [125245]="r",
- [125246]="r",
- [125247]="r",
- [125248]="r",
- [125249]="r",
- [125250]="r",
- [125251]="r",
- [125252]="nsm",
- [125253]="nsm",
- [125254]="nsm",
- [125255]="nsm",
- [125256]="nsm",
- [125257]="nsm",
- [125258]="nsm",
- [125264]="r",
- [125265]="r",
- [125266]="r",
- [125267]="r",
- [125268]="r",
- [125269]="r",
- [125270]="r",
- [125271]="r",
- [125272]="r",
- [125273]="r",
- [125278]="r",
- [125279]="r",
- [126464]="al",
- [126465]="al",
- [126466]="al",
- [126467]="al",
- [126469]="al",
- [126470]="al",
- [126471]="al",
- [126472]="al",
- [126473]="al",
- [126474]="al",
- [126475]="al",
- [126476]="al",
- [126477]="al",
- [126478]="al",
- [126479]="al",
- [126480]="al",
- [126481]="al",
- [126482]="al",
- [126483]="al",
- [126484]="al",
- [126485]="al",
- [126486]="al",
- [126487]="al",
- [126488]="al",
- [126489]="al",
- [126490]="al",
- [126491]="al",
- [126492]="al",
- [126493]="al",
- [126494]="al",
- [126495]="al",
- [126497]="al",
- [126498]="al",
- [126500]="al",
- [126503]="al",
- [126505]="al",
- [126506]="al",
- [126507]="al",
- [126508]="al",
- [126509]="al",
- [126510]="al",
- [126511]="al",
- [126512]="al",
- [126513]="al",
- [126514]="al",
- [126516]="al",
- [126517]="al",
- [126518]="al",
- [126519]="al",
- [126521]="al",
- [126523]="al",
- [126530]="al",
- [126535]="al",
- [126537]="al",
- [126539]="al",
- [126541]="al",
- [126542]="al",
- [126543]="al",
- [126545]="al",
- [126546]="al",
- [126548]="al",
- [126551]="al",
- [126553]="al",
- [126555]="al",
- [126557]="al",
- [126559]="al",
- [126561]="al",
- [126562]="al",
- [126564]="al",
- [126567]="al",
- [126568]="al",
- [126569]="al",
- [126570]="al",
- [126572]="al",
- [126573]="al",
- [126574]="al",
- [126575]="al",
- [126576]="al",
- [126577]="al",
- [126578]="al",
- [126580]="al",
- [126581]="al",
- [126582]="al",
- [126583]="al",
- [126585]="al",
- [126586]="al",
- [126587]="al",
- [126588]="al",
- [126590]="al",
- [126592]="al",
- [126593]="al",
- [126594]="al",
- [126595]="al",
- [126596]="al",
- [126597]="al",
- [126598]="al",
- [126599]="al",
- [126600]="al",
- [126601]="al",
- [126603]="al",
- [126604]="al",
- [126605]="al",
- [126606]="al",
- [126607]="al",
- [126608]="al",
- [126609]="al",
- [126610]="al",
- [126611]="al",
- [126612]="al",
- [126613]="al",
- [126614]="al",
- [126615]="al",
- [126616]="al",
- [126617]="al",
- [126618]="al",
- [126619]="al",
- [126625]="al",
- [126626]="al",
- [126627]="al",
- [126629]="al",
- [126630]="al",
- [126631]="al",
- [126632]="al",
- [126633]="al",
- [126635]="al",
- [126636]="al",
- [126637]="al",
- [126638]="al",
- [126639]="al",
- [126640]="al",
- [126641]="al",
- [126642]="al",
- [126643]="al",
- [126644]="al",
- [126645]="al",
- [126646]="al",
- [126647]="al",
- [126648]="al",
- [126649]="al",
- [126650]="al",
- [126651]="al",
- [126704]="on",
- [126705]="on",
- [126976]="on",
- [126977]="on",
- [126978]="on",
- [126979]="on",
- [126980]="on",
- [126981]="on",
- [126982]="on",
- [126983]="on",
- [126984]="on",
- [126985]="on",
- [126986]="on",
- [126987]="on",
- [126988]="on",
- [126989]="on",
- [126990]="on",
- [126991]="on",
- [126992]="on",
- [126993]="on",
- [126994]="on",
- [126995]="on",
- [126996]="on",
- [126997]="on",
- [126998]="on",
- [126999]="on",
- [127000]="on",
- [127001]="on",
- [127002]="on",
- [127003]="on",
- [127004]="on",
- [127005]="on",
- [127006]="on",
- [127007]="on",
- [127008]="on",
- [127009]="on",
- [127010]="on",
- [127011]="on",
- [127012]="on",
- [127013]="on",
- [127014]="on",
- [127015]="on",
- [127016]="on",
- [127017]="on",
- [127018]="on",
- [127019]="on",
- [127024]="on",
- [127025]="on",
- [127026]="on",
- [127027]="on",
- [127028]="on",
- [127029]="on",
- [127030]="on",
- [127031]="on",
- [127032]="on",
- [127033]="on",
- [127034]="on",
- [127035]="on",
- [127036]="on",
- [127037]="on",
- [127038]="on",
- [127039]="on",
- [127040]="on",
- [127041]="on",
- [127042]="on",
- [127043]="on",
- [127044]="on",
- [127045]="on",
- [127046]="on",
- [127047]="on",
- [127048]="on",
- [127049]="on",
- [127050]="on",
- [127051]="on",
- [127052]="on",
- [127053]="on",
- [127054]="on",
- [127055]="on",
- [127056]="on",
- [127057]="on",
- [127058]="on",
- [127059]="on",
- [127060]="on",
- [127061]="on",
- [127062]="on",
- [127063]="on",
- [127064]="on",
- [127065]="on",
- [127066]="on",
- [127067]="on",
- [127068]="on",
- [127069]="on",
- [127070]="on",
- [127071]="on",
- [127072]="on",
- [127073]="on",
- [127074]="on",
- [127075]="on",
- [127076]="on",
- [127077]="on",
- [127078]="on",
- [127079]="on",
- [127080]="on",
- [127081]="on",
- [127082]="on",
- [127083]="on",
- [127084]="on",
- [127085]="on",
- [127086]="on",
- [127087]="on",
- [127088]="on",
- [127089]="on",
- [127090]="on",
- [127091]="on",
- [127092]="on",
- [127093]="on",
- [127094]="on",
- [127095]="on",
- [127096]="on",
- [127097]="on",
- [127098]="on",
- [127099]="on",
- [127100]="on",
- [127101]="on",
- [127102]="on",
- [127103]="on",
- [127104]="on",
- [127105]="on",
- [127106]="on",
- [127107]="on",
- [127108]="on",
- [127109]="on",
- [127110]="on",
- [127111]="on",
- [127112]="on",
- [127113]="on",
- [127114]="on",
- [127115]="on",
- [127116]="on",
- [127117]="on",
- [127118]="on",
- [127119]="on",
- [127120]="on",
- [127121]="on",
- [127122]="on",
- [127123]="on",
- [127136]="on",
- [127137]="on",
- [127138]="on",
- [127139]="on",
- [127140]="on",
- [127141]="on",
- [127142]="on",
- [127143]="on",
- [127144]="on",
- [127145]="on",
- [127146]="on",
- [127147]="on",
- [127148]="on",
- [127149]="on",
- [127150]="on",
- [127153]="on",
- [127154]="on",
- [127155]="on",
- [127156]="on",
- [127157]="on",
- [127158]="on",
- [127159]="on",
- [127160]="on",
- [127161]="on",
- [127162]="on",
- [127163]="on",
- [127164]="on",
- [127165]="on",
- [127166]="on",
- [127167]="on",
- [127169]="on",
- [127170]="on",
- [127171]="on",
- [127172]="on",
- [127173]="on",
- [127174]="on",
- [127175]="on",
- [127176]="on",
- [127177]="on",
- [127178]="on",
- [127179]="on",
- [127180]="on",
- [127181]="on",
- [127182]="on",
- [127183]="on",
- [127185]="on",
- [127186]="on",
- [127187]="on",
- [127188]="on",
- [127189]="on",
- [127190]="on",
- [127191]="on",
- [127192]="on",
- [127193]="on",
- [127194]="on",
- [127195]="on",
- [127196]="on",
- [127197]="on",
- [127198]="on",
- [127199]="on",
- [127200]="on",
- [127201]="on",
- [127202]="on",
- [127203]="on",
- [127204]="on",
- [127205]="on",
- [127206]="on",
- [127207]="on",
- [127208]="on",
- [127209]="on",
- [127210]="on",
- [127211]="on",
- [127212]="on",
- [127213]="on",
- [127214]="on",
- [127215]="on",
- [127216]="on",
- [127217]="on",
- [127218]="on",
- [127219]="on",
- [127220]="on",
- [127221]="on",
- [127232]="en",
- [127233]="en",
- [127234]="en",
- [127235]="en",
- [127236]="en",
- [127237]="en",
- [127238]="en",
- [127239]="en",
- [127240]="en",
- [127241]="en",
- [127242]="en",
- [127243]="on",
- [127244]="on",
- [127338]="on",
- [127339]="on",
- [127584]="on",
- [127585]="on",
- [127586]="on",
- [127587]="on",
- [127588]="on",
- [127589]="on",
- [127744]="on",
- [127745]="on",
- [127746]="on",
- [127747]="on",
- [127748]="on",
- [127749]="on",
- [127750]="on",
- [127751]="on",
- [127752]="on",
- [127753]="on",
- [127754]="on",
- [127755]="on",
- [127756]="on",
- [127757]="on",
- [127758]="on",
- [127759]="on",
- [127760]="on",
- [127761]="on",
- [127762]="on",
- [127763]="on",
- [127764]="on",
- [127765]="on",
- [127766]="on",
- [127767]="on",
- [127768]="on",
- [127769]="on",
- [127770]="on",
- [127771]="on",
- [127772]="on",
- [127773]="on",
- [127774]="on",
- [127775]="on",
- [127776]="on",
- [127777]="on",
- [127778]="on",
- [127779]="on",
- [127780]="on",
- [127781]="on",
- [127782]="on",
- [127783]="on",
- [127784]="on",
- [127785]="on",
- [127786]="on",
- [127787]="on",
- [127788]="on",
- [127789]="on",
- [127790]="on",
- [127791]="on",
- [127792]="on",
- [127793]="on",
- [127794]="on",
- [127795]="on",
- [127796]="on",
- [127797]="on",
- [127798]="on",
- [127799]="on",
- [127800]="on",
- [127801]="on",
- [127802]="on",
- [127803]="on",
- [127804]="on",
- [127805]="on",
- [127806]="on",
- [127807]="on",
- [127808]="on",
- [127809]="on",
- [127810]="on",
- [127811]="on",
- [127812]="on",
- [127813]="on",
- [127814]="on",
- [127815]="on",
- [127816]="on",
- [127817]="on",
- [127818]="on",
- [127819]="on",
- [127820]="on",
- [127821]="on",
- [127822]="on",
- [127823]="on",
- [127824]="on",
- [127825]="on",
- [127826]="on",
- [127827]="on",
- [127828]="on",
- [127829]="on",
- [127830]="on",
- [127831]="on",
- [127832]="on",
- [127833]="on",
- [127834]="on",
- [127835]="on",
- [127836]="on",
- [127837]="on",
- [127838]="on",
- [127839]="on",
- [127840]="on",
- [127841]="on",
- [127842]="on",
- [127843]="on",
- [127844]="on",
- [127845]="on",
- [127846]="on",
- [127847]="on",
- [127848]="on",
- [127849]="on",
- [127850]="on",
- [127851]="on",
- [127852]="on",
- [127853]="on",
- [127854]="on",
- [127855]="on",
- [127856]="on",
- [127857]="on",
- [127858]="on",
- [127859]="on",
- [127860]="on",
- [127861]="on",
- [127862]="on",
- [127863]="on",
- [127864]="on",
- [127865]="on",
- [127866]="on",
- [127867]="on",
- [127868]="on",
- [127869]="on",
- [127870]="on",
- [127871]="on",
- [127872]="on",
- [127873]="on",
- [127874]="on",
- [127875]="on",
- [127876]="on",
- [127877]="on",
- [127878]="on",
- [127879]="on",
- [127880]="on",
- [127881]="on",
- [127882]="on",
- [127883]="on",
- [127884]="on",
- [127885]="on",
- [127886]="on",
- [127887]="on",
- [127888]="on",
- [127889]="on",
- [127890]="on",
- [127891]="on",
- [127892]="on",
- [127893]="on",
- [127894]="on",
- [127895]="on",
- [127896]="on",
- [127897]="on",
- [127898]="on",
- [127899]="on",
- [127900]="on",
- [127901]="on",
- [127902]="on",
- [127903]="on",
- [127904]="on",
- [127905]="on",
- [127906]="on",
- [127907]="on",
- [127908]="on",
- [127909]="on",
- [127910]="on",
- [127911]="on",
- [127912]="on",
- [127913]="on",
- [127914]="on",
- [127915]="on",
- [127916]="on",
- [127917]="on",
- [127918]="on",
- [127919]="on",
- [127920]="on",
- [127921]="on",
- [127922]="on",
- [127923]="on",
- [127924]="on",
- [127925]="on",
- [127926]="on",
- [127927]="on",
- [127928]="on",
- [127929]="on",
- [127930]="on",
- [127931]="on",
- [127932]="on",
- [127933]="on",
- [127934]="on",
- [127935]="on",
- [127936]="on",
- [127937]="on",
- [127938]="on",
- [127939]="on",
- [127940]="on",
- [127941]="on",
- [127942]="on",
- [127943]="on",
- [127944]="on",
- [127945]="on",
- [127946]="on",
- [127947]="on",
- [127948]="on",
- [127949]="on",
- [127950]="on",
- [127951]="on",
- [127952]="on",
- [127953]="on",
- [127954]="on",
- [127955]="on",
- [127956]="on",
- [127957]="on",
- [127958]="on",
- [127959]="on",
- [127960]="on",
- [127961]="on",
- [127962]="on",
- [127963]="on",
- [127964]="on",
- [127965]="on",
- [127966]="on",
- [127967]="on",
- [127968]="on",
- [127969]="on",
- [127970]="on",
- [127971]="on",
- [127972]="on",
- [127973]="on",
- [127974]="on",
- [127975]="on",
- [127976]="on",
- [127977]="on",
- [127978]="on",
- [127979]="on",
- [127980]="on",
- [127981]="on",
- [127982]="on",
- [127983]="on",
- [127984]="on",
- [127985]="on",
- [127986]="on",
- [127987]="on",
- [127988]="on",
- [127989]="on",
- [127990]="on",
- [127991]="on",
- [127992]="on",
- [127993]="on",
- [127994]="on",
- [127995]="on",
- [127996]="on",
- [127997]="on",
- [127998]="on",
- [127999]="on",
- [128000]="on",
- [128001]="on",
- [128002]="on",
- [128003]="on",
- [128004]="on",
- [128005]="on",
- [128006]="on",
- [128007]="on",
- [128008]="on",
- [128009]="on",
- [128010]="on",
- [128011]="on",
- [128012]="on",
- [128013]="on",
- [128014]="on",
- [128015]="on",
- [128016]="on",
- [128017]="on",
- [128018]="on",
- [128019]="on",
- [128020]="on",
- [128021]="on",
- [128022]="on",
- [128023]="on",
- [128024]="on",
- [128025]="on",
- [128026]="on",
- [128027]="on",
- [128028]="on",
- [128029]="on",
- [128030]="on",
- [128031]="on",
- [128032]="on",
- [128033]="on",
- [128034]="on",
- [128035]="on",
- [128036]="on",
- [128037]="on",
- [128038]="on",
- [128039]="on",
- [128040]="on",
- [128041]="on",
- [128042]="on",
- [128043]="on",
- [128044]="on",
- [128045]="on",
- [128046]="on",
- [128047]="on",
- [128048]="on",
- [128049]="on",
- [128050]="on",
- [128051]="on",
- [128052]="on",
- [128053]="on",
- [128054]="on",
- [128055]="on",
- [128056]="on",
- [128057]="on",
- [128058]="on",
- [128059]="on",
- [128060]="on",
- [128061]="on",
- [128062]="on",
- [128063]="on",
- [128064]="on",
- [128065]="on",
- [128066]="on",
- [128067]="on",
- [128068]="on",
- [128069]="on",
- [128070]="on",
- [128071]="on",
- [128072]="on",
- [128073]="on",
- [128074]="on",
- [128075]="on",
- [128076]="on",
- [128077]="on",
- [128078]="on",
- [128079]="on",
- [128080]="on",
- [128081]="on",
- [128082]="on",
- [128083]="on",
- [128084]="on",
- [128085]="on",
- [128086]="on",
- [128087]="on",
- [128088]="on",
- [128089]="on",
- [128090]="on",
- [128091]="on",
- [128092]="on",
- [128093]="on",
- [128094]="on",
- [128095]="on",
- [128096]="on",
- [128097]="on",
- [128098]="on",
- [128099]="on",
- [128100]="on",
- [128101]="on",
- [128102]="on",
- [128103]="on",
- [128104]="on",
- [128105]="on",
- [128106]="on",
- [128107]="on",
- [128108]="on",
- [128109]="on",
- [128110]="on",
- [128111]="on",
- [128112]="on",
- [128113]="on",
- [128114]="on",
- [128115]="on",
- [128116]="on",
- [128117]="on",
- [128118]="on",
- [128119]="on",
- [128120]="on",
- [128121]="on",
- [128122]="on",
- [128123]="on",
- [128124]="on",
- [128125]="on",
- [128126]="on",
- [128127]="on",
- [128128]="on",
- [128129]="on",
- [128130]="on",
- [128131]="on",
- [128132]="on",
- [128133]="on",
- [128134]="on",
- [128135]="on",
- [128136]="on",
- [128137]="on",
- [128138]="on",
- [128139]="on",
- [128140]="on",
- [128141]="on",
- [128142]="on",
- [128143]="on",
- [128144]="on",
- [128145]="on",
- [128146]="on",
- [128147]="on",
- [128148]="on",
- [128149]="on",
- [128150]="on",
- [128151]="on",
- [128152]="on",
- [128153]="on",
- [128154]="on",
- [128155]="on",
- [128156]="on",
- [128157]="on",
- [128158]="on",
- [128159]="on",
- [128160]="on",
- [128161]="on",
- [128162]="on",
- [128163]="on",
- [128164]="on",
- [128165]="on",
- [128166]="on",
- [128167]="on",
- [128168]="on",
- [128169]="on",
- [128170]="on",
- [128171]="on",
- [128172]="on",
- [128173]="on",
- [128174]="on",
- [128175]="on",
- [128176]="on",
- [128177]="on",
- [128178]="on",
- [128179]="on",
- [128180]="on",
- [128181]="on",
- [128182]="on",
- [128183]="on",
- [128184]="on",
- [128185]="on",
- [128186]="on",
- [128187]="on",
- [128188]="on",
- [128189]="on",
- [128190]="on",
- [128191]="on",
- [128192]="on",
- [128193]="on",
- [128194]="on",
- [128195]="on",
- [128196]="on",
- [128197]="on",
- [128198]="on",
- [128199]="on",
- [128200]="on",
- [128201]="on",
- [128202]="on",
- [128203]="on",
- [128204]="on",
- [128205]="on",
- [128206]="on",
- [128207]="on",
- [128208]="on",
- [128209]="on",
- [128210]="on",
- [128211]="on",
- [128212]="on",
- [128213]="on",
- [128214]="on",
- [128215]="on",
- [128216]="on",
- [128217]="on",
- [128218]="on",
- [128219]="on",
- [128220]="on",
- [128221]="on",
- [128222]="on",
- [128223]="on",
- [128224]="on",
- [128225]="on",
- [128226]="on",
- [128227]="on",
- [128228]="on",
- [128229]="on",
- [128230]="on",
- [128231]="on",
- [128232]="on",
- [128233]="on",
- [128234]="on",
- [128235]="on",
- [128236]="on",
- [128237]="on",
- [128238]="on",
- [128239]="on",
- [128240]="on",
- [128241]="on",
- [128242]="on",
- [128243]="on",
- [128244]="on",
- [128245]="on",
- [128246]="on",
- [128247]="on",
- [128248]="on",
- [128249]="on",
- [128250]="on",
- [128251]="on",
- [128252]="on",
- [128253]="on",
- [128254]="on",
- [128255]="on",
- [128256]="on",
- [128257]="on",
- [128258]="on",
- [128259]="on",
- [128260]="on",
- [128261]="on",
- [128262]="on",
- [128263]="on",
- [128264]="on",
- [128265]="on",
- [128266]="on",
- [128267]="on",
- [128268]="on",
- [128269]="on",
- [128270]="on",
- [128271]="on",
- [128272]="on",
- [128273]="on",
- [128274]="on",
- [128275]="on",
- [128276]="on",
- [128277]="on",
- [128278]="on",
- [128279]="on",
- [128280]="on",
- [128281]="on",
- [128282]="on",
- [128283]="on",
- [128284]="on",
- [128285]="on",
- [128286]="on",
- [128287]="on",
- [128288]="on",
- [128289]="on",
- [128290]="on",
- [128291]="on",
- [128292]="on",
- [128293]="on",
- [128294]="on",
- [128295]="on",
- [128296]="on",
- [128297]="on",
- [128298]="on",
- [128299]="on",
- [128300]="on",
- [128301]="on",
- [128302]="on",
- [128303]="on",
- [128304]="on",
- [128305]="on",
- [128306]="on",
- [128307]="on",
- [128308]="on",
- [128309]="on",
- [128310]="on",
- [128311]="on",
- [128312]="on",
- [128313]="on",
- [128314]="on",
- [128315]="on",
- [128316]="on",
- [128317]="on",
- [128318]="on",
- [128319]="on",
- [128320]="on",
- [128321]="on",
- [128322]="on",
- [128323]="on",
- [128324]="on",
- [128325]="on",
- [128326]="on",
- [128327]="on",
- [128328]="on",
- [128329]="on",
- [128330]="on",
- [128331]="on",
- [128332]="on",
- [128333]="on",
- [128334]="on",
- [128335]="on",
- [128336]="on",
- [128337]="on",
- [128338]="on",
- [128339]="on",
- [128340]="on",
- [128341]="on",
- [128342]="on",
- [128343]="on",
- [128344]="on",
- [128345]="on",
- [128346]="on",
- [128347]="on",
- [128348]="on",
- [128349]="on",
- [128350]="on",
- [128351]="on",
- [128352]="on",
- [128353]="on",
- [128354]="on",
- [128355]="on",
- [128356]="on",
- [128357]="on",
- [128358]="on",
- [128359]="on",
- [128360]="on",
- [128361]="on",
- [128362]="on",
- [128363]="on",
- [128364]="on",
- [128365]="on",
- [128366]="on",
- [128367]="on",
- [128368]="on",
- [128369]="on",
- [128370]="on",
- [128371]="on",
- [128372]="on",
- [128373]="on",
- [128374]="on",
- [128375]="on",
- [128376]="on",
- [128377]="on",
- [128378]="on",
- [128379]="on",
- [128380]="on",
- [128381]="on",
- [128382]="on",
- [128383]="on",
- [128384]="on",
- [128385]="on",
- [128386]="on",
- [128387]="on",
- [128388]="on",
- [128389]="on",
- [128390]="on",
- [128391]="on",
- [128392]="on",
- [128393]="on",
- [128394]="on",
- [128395]="on",
- [128396]="on",
- [128397]="on",
- [128398]="on",
- [128399]="on",
- [128400]="on",
- [128401]="on",
- [128402]="on",
- [128403]="on",
- [128404]="on",
- [128405]="on",
- [128406]="on",
- [128407]="on",
- [128408]="on",
- [128409]="on",
- [128410]="on",
- [128411]="on",
- [128412]="on",
- [128413]="on",
- [128414]="on",
- [128415]="on",
- [128416]="on",
- [128417]="on",
- [128418]="on",
- [128419]="on",
- [128420]="on",
- [128421]="on",
- [128422]="on",
- [128423]="on",
- [128424]="on",
- [128425]="on",
- [128426]="on",
- [128427]="on",
- [128428]="on",
- [128429]="on",
- [128430]="on",
- [128431]="on",
- [128432]="on",
- [128433]="on",
- [128434]="on",
- [128435]="on",
- [128436]="on",
- [128437]="on",
- [128438]="on",
- [128439]="on",
- [128440]="on",
- [128441]="on",
- [128442]="on",
- [128443]="on",
- [128444]="on",
- [128445]="on",
- [128446]="on",
- [128447]="on",
- [128448]="on",
- [128449]="on",
- [128450]="on",
- [128451]="on",
- [128452]="on",
- [128453]="on",
- [128454]="on",
- [128455]="on",
- [128456]="on",
- [128457]="on",
- [128458]="on",
- [128459]="on",
- [128460]="on",
- [128461]="on",
- [128462]="on",
- [128463]="on",
- [128464]="on",
- [128465]="on",
- [128466]="on",
- [128467]="on",
- [128468]="on",
- [128469]="on",
- [128470]="on",
- [128471]="on",
- [128472]="on",
- [128473]="on",
- [128474]="on",
- [128475]="on",
- [128476]="on",
- [128477]="on",
- [128478]="on",
- [128479]="on",
- [128480]="on",
- [128481]="on",
- [128482]="on",
- [128483]="on",
- [128484]="on",
- [128485]="on",
- [128486]="on",
- [128487]="on",
- [128488]="on",
- [128489]="on",
- [128490]="on",
- [128491]="on",
- [128492]="on",
- [128493]="on",
- [128494]="on",
- [128495]="on",
- [128496]="on",
- [128497]="on",
- [128498]="on",
- [128499]="on",
- [128500]="on",
- [128501]="on",
- [128502]="on",
- [128503]="on",
- [128504]="on",
- [128505]="on",
- [128506]="on",
- [128507]="on",
- [128508]="on",
- [128509]="on",
- [128510]="on",
- [128511]="on",
- [128512]="on",
- [128513]="on",
- [128514]="on",
- [128515]="on",
- [128516]="on",
- [128517]="on",
- [128518]="on",
- [128519]="on",
- [128520]="on",
- [128521]="on",
- [128522]="on",
- [128523]="on",
- [128524]="on",
- [128525]="on",
- [128526]="on",
- [128527]="on",
- [128528]="on",
- [128529]="on",
- [128530]="on",
- [128531]="on",
- [128532]="on",
- [128533]="on",
- [128534]="on",
- [128535]="on",
- [128536]="on",
- [128537]="on",
- [128538]="on",
- [128539]="on",
- [128540]="on",
- [128541]="on",
- [128542]="on",
- [128543]="on",
- [128544]="on",
- [128545]="on",
- [128546]="on",
- [128547]="on",
- [128548]="on",
- [128549]="on",
- [128550]="on",
- [128551]="on",
- [128552]="on",
- [128553]="on",
- [128554]="on",
- [128555]="on",
- [128556]="on",
- [128557]="on",
- [128558]="on",
- [128559]="on",
- [128560]="on",
- [128561]="on",
- [128562]="on",
- [128563]="on",
- [128564]="on",
- [128565]="on",
- [128566]="on",
- [128567]="on",
- [128568]="on",
- [128569]="on",
- [128570]="on",
- [128571]="on",
- [128572]="on",
- [128573]="on",
- [128574]="on",
- [128575]="on",
- [128576]="on",
- [128577]="on",
- [128578]="on",
- [128579]="on",
- [128580]="on",
- [128581]="on",
- [128582]="on",
- [128583]="on",
- [128584]="on",
- [128585]="on",
- [128586]="on",
- [128587]="on",
- [128588]="on",
- [128589]="on",
- [128590]="on",
- [128591]="on",
- [128592]="on",
- [128593]="on",
- [128594]="on",
- [128595]="on",
- [128596]="on",
- [128597]="on",
- [128598]="on",
- [128599]="on",
- [128600]="on",
- [128601]="on",
- [128602]="on",
- [128603]="on",
- [128604]="on",
- [128605]="on",
- [128606]="on",
- [128607]="on",
- [128608]="on",
- [128609]="on",
- [128610]="on",
- [128611]="on",
- [128612]="on",
- [128613]="on",
- [128614]="on",
- [128615]="on",
- [128616]="on",
- [128617]="on",
- [128618]="on",
- [128619]="on",
- [128620]="on",
- [128621]="on",
- [128622]="on",
- [128623]="on",
- [128624]="on",
- [128625]="on",
- [128626]="on",
- [128627]="on",
- [128628]="on",
- [128629]="on",
- [128630]="on",
- [128631]="on",
- [128632]="on",
- [128633]="on",
- [128634]="on",
- [128635]="on",
- [128636]="on",
- [128637]="on",
- [128638]="on",
- [128639]="on",
- [128640]="on",
- [128641]="on",
- [128642]="on",
- [128643]="on",
- [128644]="on",
- [128645]="on",
- [128646]="on",
- [128647]="on",
- [128648]="on",
- [128649]="on",
- [128650]="on",
- [128651]="on",
- [128652]="on",
- [128653]="on",
- [128654]="on",
- [128655]="on",
- [128656]="on",
- [128657]="on",
- [128658]="on",
- [128659]="on",
- [128660]="on",
- [128661]="on",
- [128662]="on",
- [128663]="on",
- [128664]="on",
- [128665]="on",
- [128666]="on",
- [128667]="on",
- [128668]="on",
- [128669]="on",
- [128670]="on",
- [128671]="on",
- [128672]="on",
- [128673]="on",
- [128674]="on",
- [128675]="on",
- [128676]="on",
- [128677]="on",
- [128678]="on",
- [128679]="on",
- [128680]="on",
- [128681]="on",
- [128682]="on",
- [128683]="on",
- [128684]="on",
- [128685]="on",
- [128686]="on",
- [128687]="on",
- [128688]="on",
- [128689]="on",
- [128690]="on",
- [128691]="on",
- [128692]="on",
- [128693]="on",
- [128694]="on",
- [128695]="on",
- [128696]="on",
- [128697]="on",
- [128698]="on",
- [128699]="on",
- [128700]="on",
- [128701]="on",
- [128702]="on",
- [128703]="on",
- [128704]="on",
- [128705]="on",
- [128706]="on",
- [128707]="on",
- [128708]="on",
- [128709]="on",
- [128710]="on",
- [128711]="on",
- [128712]="on",
- [128713]="on",
- [128714]="on",
- [128715]="on",
- [128716]="on",
- [128717]="on",
- [128718]="on",
- [128719]="on",
- [128720]="on",
- [128721]="on",
- [128722]="on",
- [128723]="on",
- [128724]="on",
- [128736]="on",
- [128737]="on",
- [128738]="on",
- [128739]="on",
- [128740]="on",
- [128741]="on",
- [128742]="on",
- [128743]="on",
- [128744]="on",
- [128745]="on",
- [128746]="on",
- [128747]="on",
- [128748]="on",
- [128752]="on",
- [128753]="on",
- [128754]="on",
- [128755]="on",
- [128756]="on",
- [128757]="on",
- [128758]="on",
- [128759]="on",
- [128760]="on",
- [128768]="on",
- [128769]="on",
- [128770]="on",
- [128771]="on",
- [128772]="on",
- [128773]="on",
- [128774]="on",
- [128775]="on",
- [128776]="on",
- [128777]="on",
- [128778]="on",
- [128779]="on",
- [128780]="on",
- [128781]="on",
- [128782]="on",
- [128783]="on",
- [128784]="on",
- [128785]="on",
- [128786]="on",
- [128787]="on",
- [128788]="on",
- [128789]="on",
- [128790]="on",
- [128791]="on",
- [128792]="on",
- [128793]="on",
- [128794]="on",
- [128795]="on",
- [128796]="on",
- [128797]="on",
- [128798]="on",
- [128799]="on",
- [128800]="on",
- [128801]="on",
- [128802]="on",
- [128803]="on",
- [128804]="on",
- [128805]="on",
- [128806]="on",
- [128807]="on",
- [128808]="on",
- [128809]="on",
- [128810]="on",
- [128811]="on",
- [128812]="on",
- [128813]="on",
- [128814]="on",
- [128815]="on",
- [128816]="on",
- [128817]="on",
- [128818]="on",
- [128819]="on",
- [128820]="on",
- [128821]="on",
- [128822]="on",
- [128823]="on",
- [128824]="on",
- [128825]="on",
- [128826]="on",
- [128827]="on",
- [128828]="on",
- [128829]="on",
- [128830]="on",
- [128831]="on",
- [128832]="on",
- [128833]="on",
- [128834]="on",
- [128835]="on",
- [128836]="on",
- [128837]="on",
- [128838]="on",
- [128839]="on",
- [128840]="on",
- [128841]="on",
- [128842]="on",
- [128843]="on",
- [128844]="on",
- [128845]="on",
- [128846]="on",
- [128847]="on",
- [128848]="on",
- [128849]="on",
- [128850]="on",
- [128851]="on",
- [128852]="on",
- [128853]="on",
- [128854]="on",
- [128855]="on",
- [128856]="on",
- [128857]="on",
- [128858]="on",
- [128859]="on",
- [128860]="on",
- [128861]="on",
- [128862]="on",
- [128863]="on",
- [128864]="on",
- [128865]="on",
- [128866]="on",
- [128867]="on",
- [128868]="on",
- [128869]="on",
- [128870]="on",
- [128871]="on",
- [128872]="on",
- [128873]="on",
- [128874]="on",
- [128875]="on",
- [128876]="on",
- [128877]="on",
- [128878]="on",
- [128879]="on",
- [128880]="on",
- [128881]="on",
- [128882]="on",
- [128883]="on",
- [128896]="on",
- [128897]="on",
- [128898]="on",
- [128899]="on",
- [128900]="on",
- [128901]="on",
- [128902]="on",
- [128903]="on",
- [128904]="on",
- [128905]="on",
- [128906]="on",
- [128907]="on",
- [128908]="on",
- [128909]="on",
- [128910]="on",
- [128911]="on",
- [128912]="on",
- [128913]="on",
- [128914]="on",
- [128915]="on",
- [128916]="on",
- [128917]="on",
- [128918]="on",
- [128919]="on",
- [128920]="on",
- [128921]="on",
- [128922]="on",
- [128923]="on",
- [128924]="on",
- [128925]="on",
- [128926]="on",
- [128927]="on",
- [128928]="on",
- [128929]="on",
- [128930]="on",
- [128931]="on",
- [128932]="on",
- [128933]="on",
- [128934]="on",
- [128935]="on",
- [128936]="on",
- [128937]="on",
- [128938]="on",
- [128939]="on",
- [128940]="on",
- [128941]="on",
- [128942]="on",
- [128943]="on",
- [128944]="on",
- [128945]="on",
- [128946]="on",
- [128947]="on",
- [128948]="on",
- [128949]="on",
- [128950]="on",
- [128951]="on",
- [128952]="on",
- [128953]="on",
- [128954]="on",
- [128955]="on",
- [128956]="on",
- [128957]="on",
- [128958]="on",
- [128959]="on",
- [128960]="on",
- [128961]="on",
- [128962]="on",
- [128963]="on",
- [128964]="on",
- [128965]="on",
- [128966]="on",
- [128967]="on",
- [128968]="on",
- [128969]="on",
- [128970]="on",
- [128971]="on",
- [128972]="on",
- [128973]="on",
- [128974]="on",
- [128975]="on",
- [128976]="on",
- [128977]="on",
- [128978]="on",
- [128979]="on",
- [128980]="on",
- [129024]="on",
- [129025]="on",
- [129026]="on",
- [129027]="on",
- [129028]="on",
- [129029]="on",
- [129030]="on",
- [129031]="on",
- [129032]="on",
- [129033]="on",
- [129034]="on",
- [129035]="on",
- [129040]="on",
- [129041]="on",
- [129042]="on",
- [129043]="on",
- [129044]="on",
- [129045]="on",
- [129046]="on",
- [129047]="on",
- [129048]="on",
- [129049]="on",
- [129050]="on",
- [129051]="on",
- [129052]="on",
- [129053]="on",
- [129054]="on",
- [129055]="on",
- [129056]="on",
- [129057]="on",
- [129058]="on",
- [129059]="on",
- [129060]="on",
- [129061]="on",
- [129062]="on",
- [129063]="on",
- [129064]="on",
- [129065]="on",
- [129066]="on",
- [129067]="on",
- [129068]="on",
- [129069]="on",
- [129070]="on",
- [129071]="on",
- [129072]="on",
- [129073]="on",
- [129074]="on",
- [129075]="on",
- [129076]="on",
- [129077]="on",
- [129078]="on",
- [129079]="on",
- [129080]="on",
- [129081]="on",
- [129082]="on",
- [129083]="on",
- [129084]="on",
- [129085]="on",
- [129086]="on",
- [129087]="on",
- [129088]="on",
- [129089]="on",
- [129090]="on",
- [129091]="on",
- [129092]="on",
- [129093]="on",
- [129094]="on",
- [129095]="on",
- [129104]="on",
- [129105]="on",
- [129106]="on",
- [129107]="on",
- [129108]="on",
- [129109]="on",
- [129110]="on",
- [129111]="on",
- [129112]="on",
- [129113]="on",
- [129120]="on",
- [129121]="on",
- [129122]="on",
- [129123]="on",
- [129124]="on",
- [129125]="on",
- [129126]="on",
- [129127]="on",
- [129128]="on",
- [129129]="on",
- [129130]="on",
- [129131]="on",
- [129132]="on",
- [129133]="on",
- [129134]="on",
- [129135]="on",
- [129136]="on",
- [129137]="on",
- [129138]="on",
- [129139]="on",
- [129140]="on",
- [129141]="on",
- [129142]="on",
- [129143]="on",
- [129144]="on",
- [129145]="on",
- [129146]="on",
- [129147]="on",
- [129148]="on",
- [129149]="on",
- [129150]="on",
- [129151]="on",
- [129152]="on",
- [129153]="on",
- [129154]="on",
- [129155]="on",
- [129156]="on",
- [129157]="on",
- [129158]="on",
- [129159]="on",
- [129168]="on",
- [129169]="on",
- [129170]="on",
- [129171]="on",
- [129172]="on",
- [129173]="on",
- [129174]="on",
- [129175]="on",
- [129176]="on",
- [129177]="on",
- [129178]="on",
- [129179]="on",
- [129180]="on",
- [129181]="on",
- [129182]="on",
- [129183]="on",
- [129184]="on",
- [129185]="on",
- [129186]="on",
- [129187]="on",
- [129188]="on",
- [129189]="on",
- [129190]="on",
- [129191]="on",
- [129192]="on",
- [129193]="on",
- [129194]="on",
- [129195]="on",
- [129196]="on",
- [129197]="on",
- [129280]="on",
- [129281]="on",
- [129282]="on",
- [129283]="on",
- [129284]="on",
- [129285]="on",
- [129286]="on",
- [129287]="on",
- [129288]="on",
- [129289]="on",
- [129290]="on",
- [129291]="on",
- [129296]="on",
- [129297]="on",
- [129298]="on",
- [129299]="on",
- [129300]="on",
- [129301]="on",
- [129302]="on",
- [129303]="on",
- [129304]="on",
- [129305]="on",
- [129306]="on",
- [129307]="on",
- [129308]="on",
- [129309]="on",
- [129310]="on",
- [129311]="on",
- [129312]="on",
- [129313]="on",
- [129314]="on",
- [129315]="on",
- [129316]="on",
- [129317]="on",
- [129318]="on",
- [129319]="on",
- [129320]="on",
- [129321]="on",
- [129322]="on",
- [129323]="on",
- [129324]="on",
- [129325]="on",
- [129326]="on",
- [129327]="on",
- [129328]="on",
- [129329]="on",
- [129330]="on",
- [129331]="on",
- [129332]="on",
- [129333]="on",
- [129334]="on",
- [129335]="on",
- [129336]="on",
- [129337]="on",
- [129338]="on",
- [129339]="on",
- [129340]="on",
- [129341]="on",
- [129342]="on",
- [129344]="on",
- [129345]="on",
- [129346]="on",
- [129347]="on",
- [129348]="on",
- [129349]="on",
- [129350]="on",
- [129351]="on",
- [129352]="on",
- [129353]="on",
- [129354]="on",
- [129355]="on",
- [129356]="on",
- [129360]="on",
- [129361]="on",
- [129362]="on",
- [129363]="on",
- [129364]="on",
- [129365]="on",
- [129366]="on",
- [129367]="on",
- [129368]="on",
- [129369]="on",
- [129370]="on",
- [129371]="on",
- [129372]="on",
- [129373]="on",
- [129374]="on",
- [129375]="on",
- [129376]="on",
- [129377]="on",
- [129378]="on",
- [129379]="on",
- [129380]="on",
- [129381]="on",
- [129382]="on",
- [129383]="on",
- [129384]="on",
- [129385]="on",
- [129386]="on",
- [129387]="on",
- [129408]="on",
- [129409]="on",
- [129410]="on",
- [129411]="on",
- [129412]="on",
- [129413]="on",
- [129414]="on",
- [129415]="on",
- [129416]="on",
- [129417]="on",
- [129418]="on",
- [129419]="on",
- [129420]="on",
- [129421]="on",
- [129422]="on",
- [129423]="on",
- [129424]="on",
- [129425]="on",
- [129426]="on",
- [129427]="on",
- [129428]="on",
- [129429]="on",
- [129430]="on",
- [129431]="on",
- [129472]="on",
- [129488]="on",
- [129489]="on",
- [129490]="on",
- [129491]="on",
- [129492]="on",
- [129493]="on",
- [129494]="on",
- [129495]="on",
- [129496]="on",
- [129497]="on",
- [129498]="on",
- [129499]="on",
- [129500]="on",
- [129501]="on",
- [129502]="on",
- [129503]="on",
- [129504]="on",
- [129505]="on",
- [129506]="on",
- [129507]="on",
- [129508]="on",
- [129509]="on",
- [129510]="on",
- [917505]="bn",
- [917536]="bn",
- [917537]="bn",
- [917538]="bn",
- [917539]="bn",
- [917540]="bn",
- [917541]="bn",
- [917542]="bn",
- [917543]="bn",
- [917544]="bn",
- [917545]="bn",
- [917546]="bn",
- [917547]="bn",
- [917548]="bn",
- [917549]="bn",
- [917550]="bn",
- [917551]="bn",
- [917552]="bn",
- [917553]="bn",
- [917554]="bn",
- [917555]="bn",
- [917556]="bn",
- [917557]="bn",
- [917558]="bn",
- [917559]="bn",
- [917560]="bn",
- [917561]="bn",
- [917562]="bn",
- [917563]="bn",
- [917564]="bn",
- [917565]="bn",
- [917566]="bn",
- [917567]="bn",
- [917568]="bn",
- [917569]="bn",
- [917570]="bn",
- [917571]="bn",
- [917572]="bn",
- [917573]="bn",
- [917574]="bn",
- [917575]="bn",
- [917576]="bn",
- [917577]="bn",
- [917578]="bn",
- [917579]="bn",
- [917580]="bn",
- [917581]="bn",
- [917582]="bn",
- [917583]="bn",
- [917584]="bn",
- [917585]="bn",
- [917586]="bn",
- [917587]="bn",
- [917588]="bn",
- [917589]="bn",
- [917590]="bn",
- [917591]="bn",
- [917592]="bn",
- [917593]="bn",
- [917594]="bn",
- [917595]="bn",
- [917596]="bn",
- [917597]="bn",
- [917598]="bn",
- [917599]="bn",
- [917600]="bn",
- [917601]="bn",
- [917602]="bn",
- [917603]="bn",
- [917604]="bn",
- [917605]="bn",
- [917606]="bn",
- [917607]="bn",
- [917608]="bn",
- [917609]="bn",
- [917610]="bn",
- [917611]="bn",
- [917612]="bn",
- [917613]="bn",
- [917614]="bn",
- [917615]="bn",
- [917616]="bn",
- [917617]="bn",
- [917618]="bn",
- [917619]="bn",
- [917620]="bn",
- [917621]="bn",
- [917622]="bn",
- [917623]="bn",
- [917624]="bn",
- [917625]="bn",
- [917626]="bn",
- [917627]="bn",
- [917628]="bn",
- [917629]="bn",
- [917630]="bn",
- [917631]="bn",
- },
- ["mirrors"]={
- [40]=41,
- [41]=40,
- [60]=62,
- [62]=60,
- [91]=93,
- [93]=91,
- [123]=125,
- [125]=123,
- [171]=187,
- [187]=171,
- [3898]=3899,
- [3899]=3898,
- [3900]=3901,
- [3901]=3900,
- [5787]=5788,
- [5788]=5787,
- [8249]=8250,
- [8250]=8249,
- [8261]=8262,
- [8262]=8261,
- [8317]=8318,
- [8318]=8317,
- [8333]=8334,
- [8334]=8333,
- [8712]=8715,
- [8713]=8716,
- [8714]=8717,
- [8715]=8712,
- [8716]=8713,
- [8717]=8714,
- [8725]=10741,
- [8764]=8765,
- [8765]=8764,
- [8771]=8909,
- [8786]=8787,
- [8787]=8786,
- [8788]=8789,
- [8789]=8788,
- [8804]=8805,
- [8805]=8804,
- [8806]=8807,
- [8807]=8806,
- [8808]=8809,
- [8809]=8808,
- [8810]=8811,
- [8811]=8810,
- [8814]=8815,
- [8815]=8814,
- [8816]=8817,
- [8817]=8816,
- [8818]=8819,
- [8819]=8818,
- [8820]=8821,
- [8821]=8820,
- [8822]=8823,
- [8823]=8822,
- [8824]=8825,
- [8825]=8824,
- [8826]=8827,
- [8827]=8826,
- [8828]=8829,
- [8829]=8828,
- [8830]=8831,
- [8831]=8830,
- [8832]=8833,
- [8833]=8832,
- [8834]=8835,
- [8835]=8834,
- [8836]=8837,
- [8837]=8836,
- [8838]=8839,
- [8839]=8838,
- [8840]=8841,
- [8841]=8840,
- [8842]=8843,
- [8843]=8842,
- [8847]=8848,
- [8848]=8847,
- [8849]=8850,
- [8850]=8849,
- [8856]=10680,
- [8866]=8867,
- [8867]=8866,
- [8870]=10974,
- [8872]=10980,
- [8873]=10979,
- [8875]=10981,
- [8880]=8881,
- [8881]=8880,
- [8882]=8883,
- [8883]=8882,
- [8884]=8885,
- [8885]=8884,
- [8886]=8887,
- [8887]=8886,
- [8905]=8906,
- [8906]=8905,
- [8907]=8908,
- [8908]=8907,
- [8909]=8771,
- [8912]=8913,
- [8913]=8912,
- [8918]=8919,
- [8919]=8918,
- [8920]=8921,
- [8921]=8920,
- [8922]=8923,
- [8923]=8922,
- [8924]=8925,
- [8925]=8924,
- [8926]=8927,
- [8927]=8926,
- [8928]=8929,
- [8929]=8928,
- [8930]=8931,
- [8931]=8930,
- [8932]=8933,
- [8933]=8932,
- [8934]=8935,
- [8935]=8934,
- [8936]=8937,
- [8937]=8936,
- [8938]=8939,
- [8939]=8938,
- [8940]=8941,
- [8941]=8940,
- [8944]=8945,
- [8945]=8944,
- [8946]=8954,
- [8947]=8955,
- [8948]=8956,
- [8950]=8957,
- [8951]=8958,
- [8954]=8946,
- [8955]=8947,
- [8956]=8948,
- [8957]=8950,
- [8958]=8951,
- [8968]=8969,
- [8969]=8968,
- [8970]=8971,
- [8971]=8970,
- [9001]=9002,
- [9002]=9001,
- [10088]=10089,
- [10089]=10088,
- [10090]=10091,
- [10091]=10090,
- [10092]=10093,
- [10093]=10092,
- [10094]=10095,
- [10095]=10094,
- [10096]=10097,
- [10097]=10096,
- [10098]=10099,
- [10099]=10098,
- [10100]=10101,
- [10101]=10100,
- [10179]=10180,
- [10180]=10179,
- [10181]=10182,
- [10182]=10181,
- [10184]=10185,
- [10185]=10184,
- [10187]=10189,
- [10189]=10187,
- [10197]=10198,
- [10198]=10197,
- [10205]=10206,
- [10206]=10205,
- [10210]=10211,
- [10211]=10210,
- [10212]=10213,
- [10213]=10212,
- [10214]=10215,
- [10215]=10214,
- [10216]=10217,
- [10217]=10216,
- [10218]=10219,
- [10219]=10218,
- [10220]=10221,
- [10221]=10220,
- [10222]=10223,
- [10223]=10222,
- [10627]=10628,
- [10628]=10627,
- [10629]=10630,
- [10630]=10629,
- [10631]=10632,
- [10632]=10631,
- [10633]=10634,
- [10634]=10633,
- [10635]=10636,
- [10636]=10635,
- [10637]=10640,
- [10638]=10639,
- [10639]=10638,
- [10640]=10637,
- [10641]=10642,
- [10642]=10641,
- [10643]=10644,
- [10644]=10643,
- [10645]=10646,
- [10646]=10645,
- [10647]=10648,
- [10648]=10647,
- [10680]=8856,
- [10688]=10689,
- [10689]=10688,
- [10692]=10693,
- [10693]=10692,
- [10703]=10704,
- [10704]=10703,
- [10705]=10706,
- [10706]=10705,
- [10708]=10709,
- [10709]=10708,
- [10712]=10713,
- [10713]=10712,
- [10714]=10715,
- [10715]=10714,
- [10741]=8725,
- [10744]=10745,
- [10745]=10744,
- [10748]=10749,
- [10749]=10748,
- [10795]=10796,
- [10796]=10795,
- [10797]=10798,
- [10798]=10797,
- [10804]=10805,
- [10805]=10804,
- [10812]=10813,
- [10813]=10812,
- [10852]=10853,
- [10853]=10852,
- [10873]=10874,
- [10874]=10873,
- [10877]=10878,
- [10878]=10877,
- [10879]=10880,
- [10880]=10879,
- [10881]=10882,
- [10882]=10881,
- [10883]=10884,
- [10884]=10883,
- [10891]=10892,
- [10892]=10891,
- [10897]=10898,
- [10898]=10897,
- [10899]=10900,
- [10900]=10899,
- [10901]=10902,
- [10902]=10901,
- [10903]=10904,
- [10904]=10903,
- [10905]=10906,
- [10906]=10905,
- [10907]=10908,
- [10908]=10907,
- [10913]=10914,
- [10914]=10913,
- [10918]=10919,
- [10919]=10918,
- [10920]=10921,
- [10921]=10920,
- [10922]=10923,
- [10923]=10922,
- [10924]=10925,
- [10925]=10924,
- [10927]=10928,
- [10928]=10927,
- [10931]=10932,
- [10932]=10931,
- [10939]=10940,
- [10940]=10939,
- [10941]=10942,
- [10942]=10941,
- [10943]=10944,
- [10944]=10943,
- [10945]=10946,
- [10946]=10945,
- [10947]=10948,
- [10948]=10947,
- [10949]=10950,
- [10950]=10949,
- [10957]=10958,
- [10958]=10957,
- [10959]=10960,
- [10960]=10959,
- [10961]=10962,
- [10962]=10961,
- [10963]=10964,
- [10964]=10963,
- [10965]=10966,
- [10966]=10965,
- [10974]=8870,
- [10979]=8873,
- [10980]=8872,
- [10981]=8875,
- [10988]=10989,
- [10989]=10988,
- [10999]=11000,
- [11000]=10999,
- [11001]=11002,
- [11002]=11001,
- [11778]=11779,
- [11779]=11778,
- [11780]=11781,
- [11781]=11780,
- [11785]=11786,
- [11786]=11785,
- [11788]=11789,
- [11789]=11788,
- [11804]=11805,
- [11805]=11804,
- [11808]=11809,
- [11809]=11808,
- [11810]=11811,
- [11811]=11810,
- [11812]=11813,
- [11813]=11812,
- [11814]=11815,
- [11815]=11814,
- [11816]=11817,
- [11817]=11816,
- [12296]=12297,
- [12297]=12296,
- [12298]=12299,
- [12299]=12298,
- [12300]=12301,
- [12301]=12300,
- [12302]=12303,
- [12303]=12302,
- [12304]=12305,
- [12305]=12304,
- [12308]=12309,
- [12309]=12308,
- [12310]=12311,
- [12311]=12310,
- [12312]=12313,
- [12313]=12312,
- [12314]=12315,
- [12315]=12314,
- [65113]=65114,
- [65114]=65113,
- [65115]=65116,
- [65116]=65115,
- [65117]=65118,
- [65118]=65117,
- [65124]=65125,
- [65125]=65124,
- [65288]=65289,
- [65289]=65288,
- [65308]=65310,
- [65310]=65308,
- [65339]=65341,
- [65341]=65339,
- [65371]=65373,
- [65373]=65371,
- [65375]=65376,
- [65376]=65375,
- [65378]=65379,
- [65379]=65378,
- },
- ["textclasses"]={
- [40]="open",
- [41]="close",
- [60]="open",
- [62]="close",
- [91]="open",
- [93]="close",
- [123]="open",
- [125]="close",
- [171]="open",
- [187]="close",
- [8249]="open",
- [8250]="close",
- [8317]="open",
- [8318]="close",
- [8333]="open",
- [8334]="close",
- [10647]="open",
- [10648]="close",
- [65113]="open",
- [65114]="close",
- [65115]="open",
- [65116]="close",
- [65117]="open",
- [65118]="close",
- [65124]="open",
- [65125]="close",
- [65288]="open",
- [65289]="close",
- [65308]="open",
- [65310]="close",
- [65339]="open",
- [65341]="close",
- [65371]="open",
- [65373]="close",
- [65375]="open",
- [65376]="close",
- [65378]="open",
- [65379]="close",
- },
-}
diff --git a/context/data/textadept/context/data/scite-context-data-context.lua b/context/data/textadept/context/data/scite-context-data-context.lua
deleted file mode 100644
index 315e98bef..000000000
--- a/context/data/textadept/context/data/scite-context-data-context.lua
+++ /dev/null
@@ -1,4 +0,0 @@
-return {
- ["constants"]={ "zerocount", "minusone", "minustwo", "plusone", "plustwo", "plusthree", "plusfour", "plusfive", "plussix", "plusseven", "pluseight", "plusnine", "plusten", "pluseleven", "plustwelve", "plussixteen", "plusfifty", "plushundred", "plusonehundred", "plustwohundred", "plusfivehundred", "plusthousand", "plustenthousand", "plustwentythousand", "medcard", "maxcard", "maxcardminusone", "zeropoint", "onepoint", "halfapoint", "onebasepoint", "maxcount", "maxdimen", "scaledpoint", "thousandpoint", "points", "halfpoint", "zeroskip", "zeromuskip", "onemuskip", "pluscxxvii", "pluscxxviii", "pluscclv", "pluscclvi", "normalpagebox", "directionlefttoright", "directionrighttoleft", "endoflinetoken", "outputnewlinechar", "emptytoks", "empty", "undefined", "prerollrun", "voidbox", "emptybox", "emptyvbox", "emptyhbox", "bigskipamount", "medskipamount", "smallskipamount", "fmtname", "fmtversion", "texengine", "texenginename", "texengineversion", "texenginefunctionality", "luatexengine", "pdftexengine", "xetexengine", "unknownengine", "contextformat", "contextversion", "contextlmtxmode", "contextmark", "mksuffix", "activecatcode", "bgroup", "egroup", "endline", "conditionaltrue", "conditionalfalse", "attributeunsetvalue", "statuswrite", "uprotationangle", "rightrotationangle", "downrotationangle", "leftrotationangle", "inicatcodes", "ctxcatcodes", "texcatcodes", "notcatcodes", "txtcatcodes", "vrbcatcodes", "prtcatcodes", "nilcatcodes", "luacatcodes", "tpacatcodes", "tpbcatcodes", "xmlcatcodes", "ctdcatcodes", "rlncatcodes", "escapecatcode", "begingroupcatcode", "endgroupcatcode", "mathshiftcatcode", "alignmentcatcode", "endoflinecatcode", "parametercatcode", "superscriptcatcode", "subscriptcatcode", "ignorecatcode", "spacecatcode", "lettercatcode", "othercatcode", "activecatcode", "commentcatcode", "invalidcatcode", "tabasciicode", "newlineasciicode", "formfeedasciicode", "endoflineasciicode", "endoffileasciicode", "commaasciicode", "spaceasciicode", "periodasciicode", 
"hashasciicode", "dollarasciicode", "commentasciicode", "ampersandasciicode", "colonasciicode", "backslashasciicode", "circumflexasciicode", "underscoreasciicode", "leftbraceasciicode", "barasciicode", "rightbraceasciicode", "tildeasciicode", "delasciicode", "leftparentasciicode", "rightparentasciicode", "lessthanasciicode", "morethanasciicode", "doublecommentsignal", "atsignasciicode", "exclamationmarkasciicode", "questionmarkasciicode", "doublequoteasciicode", "singlequoteasciicode", "forwardslashasciicode", "primeasciicode", "hyphenasciicode", "percentasciicode", "leftbracketasciicode", "rightbracketasciicode", "hsizefrozenparcode", "skipfrozenparcode", "hangfrozenparcode", "indentfrozenparcode", "parfillfrozenparcode", "adjustfrozenparcode", "protrudefrozenparcode", "tolerancefrozenparcode", "stretchfrozenparcode", "loosenessfrozenparcode", "lastlinefrozenparcode", "linepenaltyfrozenparcode", "clubpenaltyfrozenparcode", "widowpenaltyfrozenparcode", "displaypenaltyfrozenparcode", "brokenpenaltyfrozenparcode", "demeritsfrozenparcode", "shapefrozenparcode", "linefrozenparcode", "hyphenationfrozenparcode", "allfrozenparcode", "activemathcharcode", "activetabtoken", "activeformfeedtoken", "activeendoflinetoken", "batchmodecode", "nonstopmodecode", "scrollmodecode", "errorstopmodecode", "bottomlevelgroupcode", "simplegroupcode", "hboxgroupcode", "adjustedhboxgroupcode", "vboxgroupcode", "vtopgroupcode", "aligngroupcode", "noaligngroupcode", "outputgroupcode", "mathgroupcode", "discretionarygroupcode", "insertgroupcode", "vadjustgroupcode", "vcentergroupcode", "mathabovegroupcode", "mathchoicegroupcode", "alsosimplegroupcode", "semisimplegroupcode", "mathshiftgroupcode", "mathleftgroupcode", "localboxgroupcode", "splitoffgroupcode", "splitkeepgroupcode", "preamblegroupcode", "alignsetgroupcode", "finrowgroupcode", "discretionarygroupcode", "charnodecode", "hlistnodecode", "vlistnodecode", "rulenodecode", "insertnodecode", "marknodecode", "adjustnodecode", 
"ligaturenodecode", "discretionarynodecode", "whatsitnodecode", "mathnodecode", "gluenodecode", "kernnodecode", "penaltynodecode", "unsetnodecode", "mathsnodecode", "charifcode", "catifcode", "numifcode", "dimifcode", "oddifcode", "vmodeifcode", "hmodeifcode", "mmodeifcode", "innerifcode", "voidifcode", "hboxifcode", "vboxifcode", "xifcode", "eofifcode", "trueifcode", "falseifcode", "caseifcode", "definedifcode", "csnameifcode", "fontcharifcode", "overrulemathcontrolcode", "underrulemathcontrolcode", "radicalrulemathcontrolcode", "fractionrulemathcontrolcode", "accentskewhalfmathcontrolcode", "accentskewapplymathcontrolcode", "accentitalickernmathcontrolcode", "delimiteritalickernmathcontrolcode", "orditalickernmathcontrolcode", "charitalicwidthmathcontrolcode", "charitalicnoreboxmathcontrolcode", "boxednoitalickernmathcontrolcode", "nostaircasekernmathcontrolcode", "textitalickernmathcontrolcode", "noligaturingglyphoptioncode", "nokerningglyphoptioncode", "noexpansionglyphoptioncode", "noprotrusionglyphoptioncode", "noleftkerningglyphoptioncode", "noleftligaturingglyphoptioncode", "norightkerningglyphoptioncode", "norightligaturingglyphoptioncode", "noitaliccorrectionglyphoptioncode", "normalparcontextcode", "vmodeparcontextcode", "vboxparcontextcode", "vtopparcontextcode", "vcenterparcontextcode", "vadjustparcontextcode", "insertparcontextcode", "outputparcontextcode", "alignparcontextcode", "noalignparcontextcode", "spanparcontextcode", "resetparcontextcode", "fontslantperpoint", "fontinterwordspace", "fontinterwordstretch", "fontinterwordshrink", "fontexheight", "fontemwidth", "fontextraspace", "slantperpoint", "mathexheight", "mathemwidth", "interwordspace", "interwordstretch", "interwordshrink", "exheight", "emwidth", "extraspace", "mathaxisheight", "muquad", "startmode", "stopmode", "startnotmode", "stopnotmode", "startmodeset", "stopmodeset", "doifmode", "doifelsemode", "doifmodeelse", "doifnotmode", "startmodeset", "stopmodeset", "startallmodes", 
"stopallmodes", "startnotallmodes", "stopnotallmodes", "doifallmodes", "doifelseallmodes", "doifallmodeselse", "doifnotallmodes", "startenvironment", "stopenvironment", "environment", "startcomponent", "stopcomponent", "component", "startproduct", "stopproduct", "product", "startproject", "stopproject", "project", "starttext", "stoptext", "startnotext", "stopnotext", "startdocument", "stopdocument", "documentvariable", "unexpandeddocumentvariable", "setupdocument", "presetdocument", "doifelsedocumentvariable", "doifdocumentvariableelse", "doifdocumentvariable", "doifnotdocumentvariable", "startmodule", "stopmodule", "usemodule", "usetexmodule", "useluamodule", "setupmodule", "currentmoduleparameter", "moduleparameter", "everystarttext", "everystoptext", "startTEXpage", "stopTEXpage", "enablemode", "disablemode", "preventmode", "definemode", "globalenablemode", "globaldisablemode", "globalpreventmode", "pushmode", "popmode", "typescriptone", "typescripttwo", "typescriptthree", "mathsizesuffix", "mathordcode", "mathopcode", "mathbincode", "mathrelcode", "mathopencode", "mathclosecode", "mathpunctcode", "mathalphacode", "mathinnercode", "mathnothingcode", "mathlimopcode", "mathnolopcode", "mathboxcode", "mathchoicecode", "mathaccentcode", "mathradicalcode", "constantnumber", "constantnumberargument", "constantdimen", "constantdimenargument", "constantemptyargument", "continueifinputfile", "luastringsep", "!!bs", "!!es", "lefttorightmark", "righttoleftmark", "lrm", "rlm", "bidilre", "bidirle", "bidipop", "bidilro", "bidirlo", "breakablethinspace", "nobreakspace", "nonbreakablespace", "narrownobreakspace", "zerowidthnobreakspace", "ideographicspace", "ideographichalffillspace", "twoperemspace", "threeperemspace", "fourperemspace", "fiveperemspace", "sixperemspace", "figurespace", "punctuationspace", "hairspace", "enquad", "emquad", "zerowidthspace", "zerowidthnonjoiner", "zerowidthjoiner", "zwnj", "zwj", "optionalspace", "asciispacechar", "softhyphen", "Ux", "eUx", 
"Umathaccents", "parfillleftskip", "parfillrightskip", "startlmtxmode", "stoplmtxmode", "startmkivmode", "stopmkivmode", "wildcardsymbol", "normalhyphenationcode", "automatichyphenationcode", "explicithyphenationcode", "syllablehyphenationcode", "uppercasehyphenationcode", "collapsehyphenationmcode", "compoundhyphenationcode", "strictstarthyphenationcode", "strictendhyphenationcode", "automaticpenaltyhyphenationcode", "explicitpenaltyhyphenationcode", "permitgluehyphenationcode", "permitallhyphenationcode", "permitmathreplacehyphenationcode", "forcecheckhyphenationcode", "lazyligatureshyphenationcode", "forcehandlerhyphenationcode", "feedbackcompoundhyphenationcode", "ignoreboundshyphenationcode", "partialhyphenationcode", "completehyphenationcode", "normalizelinenormalizecode", "parindentskipnormalizecode", "swaphangindentnormalizecode", "swapparsshapenormalizecode", "breakafterdirnormalizecode", "removemarginkernsnormalizecode", "clipwidthnormalizecode", "flattendiscretionariesnormalizecode", "discardzerotabskipsnormalizecode", "noligaturingglyphoptioncode", "nokerningglyphoptioncode", "noleftligatureglyphoptioncode", "noleftkernglyphoptioncode", "norightligatureglyphoptioncode", "norightkernglyphoptioncode", "noexpansionglyphoptioncode", "noprotrusionglyphoptioncode", "noitaliccorrectionglyphoptioncode", "nokerningcode", "noligaturingcode", "frozenflagcode", "tolerantflagcode", "protectedflagcode", "primitiveflagcode", "permanentflagcode", "noalignedflagcode", "immutableflagcode", "mutableflagcode", "globalflagcode", "overloadedflagcode", "immediateflagcode", "conditionalflagcode", "valueflagcode", "instanceflagcode", "ordmathflattencode", "binmathflattencode", "relmathflattencode", "punctmathflattencode", "innermathflattencode", "normalworddiscoptioncode", "preworddiscoptioncode", "postworddiscoptioncode", "continuewhenlmtxmode" },
- ["helpers"]={ "startsetups", "stopsetups", "startxmlsetups", "stopxmlsetups", "startluasetups", "stopluasetups", "starttexsetups", "stoptexsetups", "startrawsetups", "stoprawsetups", "startlocalsetups", "stoplocalsetups", "starttexdefinition", "stoptexdefinition", "starttexcode", "stoptexcode", "startcontextcode", "stopcontextcode", "startcontextdefinitioncode", "stopcontextdefinitioncode", "texdefinition", "doifelsesetups", "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup", "fastsetup", "copysetups", "resetsetups", "doifelsecommandhandler", "doifcommandhandlerelse", "doifnotcommandhandler", "doifcommandhandler", "newmode", "setmode", "resetmode", "newsystemmode", "setsystemmode", "resetsystemmode", "pushsystemmode", "popsystemmode", "globalsetmode", "globalresetmode", "globalsetsystemmode", "globalresetsystemmode", "booleanmodevalue", "newcount", "newdimen", "newskip", "newmuskip", "newbox", "newtoks", "newread", "newwrite", "newmarks", "newinsert", "newattribute", "newif", "newlanguage", "newfamily", "newfam", "newhelp", "then", "begcsname", "autorule", "strippedcsname", "checkedstrippedcsname", "nofarguments", "firstargumentfalse", "firstargumenttrue", "secondargumentfalse", "secondargumenttrue", "thirdargumentfalse", "thirdargumenttrue", "fourthargumentfalse", "fourthargumenttrue", "fifthargumentfalse", "fifthargumenttrue", "sixthargumentfalse", "sixthargumenttrue", "seventhargumentfalse", "seventhargumenttrue", "vkern", "hkern", "vpenalty", "hpenalty", "doglobal", "dodoglobal", "redoglobal", "resetglobal", "donothing", "untraceddonothing", "dontcomplain", "lessboxtracing", "forgetall", "donetrue", "donefalse", "foundtrue", "foundfalse", "inlineordisplaymath", "indisplaymath", "forcedisplaymath", "startforceddisplaymath", "stopforceddisplaymath", "startpickupmath", "stoppickupmath", "reqno", "mathortext", "thebox", "htdp", "unvoidbox", "hfilll", "vfilll", "mathbox", "mathlimop", "mathnolop", 
"mathnothing", "mathalpha", "currentcatcodetable", "defaultcatcodetable", "catcodetablename", "newcatcodetable", "startcatcodetable", "stopcatcodetable", "startextendcatcodetable", "stopextendcatcodetable", "pushcatcodetable", "popcatcodetable", "restorecatcodes", "setcatcodetable", "letcatcodecommand", "defcatcodecommand", "uedcatcodecommand", "hglue", "vglue", "hfillneg", "vfillneg", "hfilllneg", "vfilllneg", "ruledhss", "ruledhfil", "ruledhfill", "ruledhfilll", "ruledhfilneg", "ruledhfillneg", "normalhfillneg", "normalhfilllneg", "ruledvss", "ruledvfil", "ruledvfill", "ruledvfilll", "ruledvfilneg", "ruledvfillneg", "normalvfillneg", "normalvfilllneg", "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter", "ruledmbox", "ruledhpack", "ruledvpack", "ruledtpack", "ruledhskip", "ruledvskip", "ruledkern", "ruledmskip", "ruledmkern", "ruledhglue", "ruledvglue", "normalhglue", "normalvglue", "ruledpenalty", "filledhboxb", "filledhboxr", "filledhboxg", "filledhboxc", "filledhboxm", "filledhboxy", "filledhboxk", "scratchstring", "scratchstringone", "scratchstringtwo", "tempstring", "scratchcounter", "globalscratchcounter", "privatescratchcounter", "scratchdimen", "globalscratchdimen", "privatescratchdimen", "scratchskip", "globalscratchskip", "privatescratchskip", "scratchmuskip", "globalscratchmuskip", "privatescratchmuskip", "scratchtoks", "globalscratchtoks", "privatescratchtoks", "scratchbox", "globalscratchbox", "privatescratchbox", "scratchmacro", "scratchmacroone", "scratchmacrotwo", "scratchconditiontrue", "scratchconditionfalse", "ifscratchcondition", "scratchconditiononetrue", "scratchconditiononefalse", "ifscratchconditionone", "scratchconditiontwotrue", "scratchconditiontwofalse", "ifscratchconditiontwo", "globalscratchcounterone", "globalscratchcountertwo", "globalscratchcounterthree", "groupedcommand", "groupedcommandcs", "triggergroupedcommand", "triggergroupedcommandcs", "simplegroupedcommand", "simplegroupedcommandcs", "pickupgroupedcommand", 
"pickupgroupedcommandcs", "usedbaselineskip", "usedlineskip", "usedlineskiplimit", "availablehsize", "localhsize", "setlocalhsize", "distributedhsize", "hsizefraction", "next", "nexttoken", "nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs", "flushnextbox", "boxisempty", "boxtostring", "contentostring", "prerolltostring", "givenwidth", "givenheight", "givendepth", "scangivendimensions", "scratchwidth", "scratchheight", "scratchdepth", "scratchoffset", "scratchdistance", "scratchtotal", "scratchhsize", "scratchvsize", "scratchxoffset", "scratchyoffset", "scratchhoffset", "scratchvoffset", "scratchxposition", "scratchyposition", "scratchtopoffset", "scratchbottomoffset", "scratchleftoffset", "scratchrightoffset", "scratchcounterone", "scratchcountertwo", "scratchcounterthree", "scratchcounterfour", "scratchcounterfive", "scratchcountersix", "scratchdimenone", "scratchdimentwo", "scratchdimenthree", "scratchdimenfour", "scratchdimenfive", "scratchdimensix", "scratchskipone", "scratchskiptwo", "scratchskipthree", "scratchskipfour", "scratchskipfive", "scratchskipsix", "scratchmuskipone", "scratchmuskiptwo", "scratchmuskipthree", "scratchmuskipfour", "scratchmuskipfive", "scratchmuskipsix", "scratchtoksone", "scratchtokstwo", "scratchtoksthree", "scratchtoksfour", "scratchtoksfive", "scratchtokssix", "scratchboxone", "scratchboxtwo", "scratchboxthree", "scratchboxfour", "scratchboxfive", "scratchboxsix", "scratchnx", "scratchny", "scratchmx", "scratchmy", "scratchunicode", "scratchmin", "scratchmax", "scratchleftskip", "scratchrightskip", "scratchtopskip", "scratchbottomskip", "doif", "doifnot", "doifelse", "firstinset", "doifinset", "doifnotinset", "doifelseinset", "doifinsetelse", "doifelsenextchar", "doifnextcharelse", "doifelsenextcharcs", "doifnextcharcselse", "doifelsenextoptional", "doifnextoptionalelse", "doifelsenextoptionalcs", "doifnextoptionalcselse", "doifelsefastoptionalcheck", "doiffastoptionalcheckelse", 
"doifelsefastoptionalcheckcs", "doiffastoptionalcheckcselse", "doifelsenextbgroup", "doifnextbgroupelse", "doifelsenextbgroupcs", "doifnextbgroupcselse", "doifelsenextparenthesis", "doifnextparenthesiselse", "doifelseundefined", "doifundefinedelse", "doifelsedefined", "doifdefinedelse", "doifundefined", "doifdefined", "doifelsevalue", "doifvalue", "doifnotvalue", "doifnothing", "doifsomething", "doifelsenothing", "doifnothingelse", "doifelsesomething", "doifsomethingelse", "doifvaluenothing", "doifvaluesomething", "doifelsevaluenothing", "doifvaluenothingelse", "doifelsedimension", "doifdimensionelse", "doifelsenumber", "doifnumberelse", "doifnumber", "doifnotnumber", "doifelsecommon", "doifcommonelse", "doifcommon", "doifnotcommon", "doifinstring", "doifnotinstring", "doifelseinstring", "doifinstringelse", "doifelseassignment", "doifassignmentelse", "docheckassignment", "doifelseassignmentcs", "doifassignmentelsecs", "validassignment", "novalidassignment", "doiftext", "doifelsetext", "doiftextelse", "doifnottext", "quitcondition", "truecondition", "falsecondition", "tracingall", "tracingnone", "loggingall", "tracingcatcodes", "showluatokens", "aliasmacro", "removetoks", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to", "endgraf", "endpar", "reseteverypar", "finishpar", "empty", "null", "space", "quad", "enspace", "emspace", "charspace", "nbsp", "crlf", "obeyspaces", "obeylines", "obeytabs", "obeypages", "obeyedspace", "obeyedline", "obeyedtab", "obeyedpage", "normalspace", "naturalspace", "controlspace", "normalspaces", "ignoretabs", "ignorelines", "ignorepages", "ignoreeofs", "setcontrolspaces", "executeifdefined", "singleexpandafter", "doubleexpandafter", "tripleexpandafter", "dontleavehmode", "removelastspace", "removeunwantedspaces", "keepunwantedspaces", "removepunctuation", "ignoreparskip", "forcestrutdepth", "onlynonbreakablespace", "wait", "writestatus", "define", "defineexpandable", "redefine", "setmeasure", "setemeasure", "setgmeasure", 
"setxmeasure", "definemeasure", "freezemeasure", "measure", "measured", "directmeasure", "setquantity", "setequantity", "setgquantity", "setxquantity", "definequantity", "freezequantity", "quantity", "quantitied", "directquantity", "installcorenamespace", "getvalue", "getuvalue", "setvalue", "setevalue", "setgvalue", "setxvalue", "letvalue", "letgvalue", "resetvalue", "undefinevalue", "ignorevalue", "setuvalue", "setuevalue", "setugvalue", "setuxvalue", "globallet", "udef", "ugdef", "uedef", "uxdef", "checked", "unique", "getparameters", "geteparameters", "getgparameters", "getxparameters", "forgetparameters", "copyparameters", "getdummyparameters", "dummyparameter", "directdummyparameter", "setdummyparameter", "letdummyparameter", "setexpandeddummyparameter", "usedummystyleandcolor", "usedummystyleparameter", "usedummycolorparameter", "processcommalist", "processcommacommand", "quitcommalist", "quitprevcommalist", "processaction", "processallactions", "processfirstactioninset", "processallactionsinset", "unexpanded", "expanded", "startexpanded", "stopexpanded", "protect", "unprotect", "firstofoneargument", "firstoftwoarguments", "secondoftwoarguments", "firstofthreearguments", "secondofthreearguments", "thirdofthreearguments", "firstoffourarguments", "secondoffourarguments", "thirdoffourarguments", "fourthoffourarguments", "firstoffivearguments", "secondoffivearguments", "thirdoffivearguments", "fourthoffivearguments", "fifthoffivearguments", "firstofsixarguments", "secondofsixarguments", "thirdofsixarguments", "fourthofsixarguments", "fifthofsixarguments", "sixthofsixarguments", "firstofoneunexpanded", "firstoftwounexpanded", "secondoftwounexpanded", "firstofthreeunexpanded", "secondofthreeunexpanded", "thirdofthreeunexpanded", "gobbleoneargument", "gobbletwoarguments", "gobblethreearguments", "gobblefourarguments", "gobblefivearguments", "gobblesixarguments", "gobblesevenarguments", "gobbleeightarguments", "gobbleninearguments", "gobbletenarguments", 
"gobbleoneoptional", "gobbletwooptionals", "gobblethreeoptionals", "gobblefouroptionals", "gobblefiveoptionals", "dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "fastloopindex", "fastloopfinal", "dowith", "doloopovermatch", "doloopovermatched", "doloopoverlist", "newconstant", "setnewconstant", "setconstant", "setconstantvalue", "newconditional", "settrue", "setfalse", "settruevalue", "setfalsevalue", "setconditional", "newmacro", "setnewmacro", "newfraction", "newsignal", "dosingleempty", "dodoubleempty", "dotripleempty", "doquadrupleempty", "doquintupleempty", "dosixtupleempty", "doseventupleempty", "dosingleargument", "dodoubleargument", "dotripleargument", "doquadrupleargument", "doquintupleargument", "dosixtupleargument", "doseventupleargument", "dosinglegroupempty", "dodoublegroupempty", "dotriplegroupempty", "doquadruplegroupempty", "doquintuplegroupempty", "permitspacesbetweengroups", "dontpermitspacesbetweengroups", "nopdfcompression", "maximumpdfcompression", "normalpdfcompression", "onlypdfobjectcompression", "nopdfobjectcompression", "modulonumber", "dividenumber", "getfirstcharacter", "doifelsefirstchar", "doiffirstcharelse", "startnointerference", "stopnointerference", "twodigits", "threedigits", "leftorright", "offinterlineskip", "oninterlineskip", "nointerlineskip", "strut", "halfstrut", "quarterstrut", "depthstrut", "halflinestrut", "noheightstrut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "strutgap", "begstrut", "endstrut", "lineheight", "leftboundary", "rightboundary", "signalcharacter", "aligncontentleft", "aligncontentmiddle", "aligncontentright", "shiftbox", "vpackbox", "hpackbox", "vpackedbox", "hpackedbox", "ordordspacing", "ordopspacing", "ordbinspacing", "ordrelspacing", "ordopenspacing", "ordclosespacing", "ordpunctspacing", "ordinnerspacing", "opordspacing", "opopspacing", "opbinspacing", "oprelspacing", "opopenspacing", "opclosespacing", 
"oppunctspacing", "opinnerspacing", "binordspacing", "binopspacing", "binbinspacing", "binrelspacing", "binopenspacing", "binclosespacing", "binpunctspacing", "bininnerspacing", "relordspacing", "relopspacing", "relbinspacing", "relrelspacing", "relopenspacing", "relclosespacing", "relpunctspacing", "relinnerspacing", "openordspacing", "openopspacing", "openbinspacing", "openrelspacing", "openopenspacing", "openclosespacing", "openpunctspacing", "openinnerspacing", "closeordspacing", "closeopspacing", "closebinspacing", "closerelspacing", "closeopenspacing", "closeclosespacing", "closepunctspacing", "closeinnerspacing", "punctordspacing", "punctopspacing", "punctbinspacing", "punctrelspacing", "punctopenspacing", "punctclosespacing", "punctpunctspacing", "punctinnerspacing", "innerordspacing", "inneropspacing", "innerbinspacing", "innerrelspacing", "inneropenspacing", "innerclosespacing", "innerpunctspacing", "innerinnerspacing", "normalreqno", "startimath", "stopimath", "normalstartimath", "normalstopimath", "startdmath", "stopdmath", "normalstartdmath", "normalstopdmath", "normalsuperscript", "normalsubscript", "normalnosuperscript", "normalnosubscript", "superscript", "subscript", "nosuperscript", "nosubscript", "superprescript", "subprescript", "nosuperprescript", "nosubsprecript", "uncramped", "cramped", "mathstyletrigger", "triggermathstyle", "mathstylefont", "mathsmallstylefont", "mathstyleface", "mathsmallstyleface", "mathstylecommand", "mathpalette", "mathstylehbox", "mathstylevbox", "mathstylevcenter", "mathstylevcenteredhbox", "mathstylevcenteredvbox", "mathtext", "setmathsmalltextbox", "setmathtextbox", "pushmathstyle", "popmathstyle", "triggerdisplaystyle", "triggertextstyle", "triggerscriptstyle", "triggerscriptscriptstyle", "triggeruncrampedstyle", "triggercrampedstyle", "triggersmallstyle", "triggeruncrampedsmallstyle", "triggercrampedsmallstyle", "triggerbigstyle", "triggeruncrampedbigstyle", "triggercrampedbigstyle", "luaexpr", "expelsedoif", 
"expdoif", "expdoifnot", "expdoifelsecommon", "expdoifcommonelse", "expdoifelseinset", "expdoifinsetelse", "ctxdirectlua", "ctxlatelua", "ctxsprint", "ctxwrite", "ctxcommand", "ctxdirectcommand", "ctxlatecommand", "ctxreport", "ctxlua", "luacode", "lateluacode", "directluacode", "registerctxluafile", "ctxloadluafile", "luaversion", "luamajorversion", "luaminorversion", "ctxluacode", "luaconditional", "luaexpanded", "ctxluamatch", "startluaparameterset", "stopluaparameterset", "luaparameterset", "definenamedlua", "obeylualines", "obeyluatokens", "startluacode", "stopluacode", "startlua", "stoplua", "startctxfunction", "stopctxfunction", "ctxfunction", "startctxfunctiondefinition", "stopctxfunctiondefinition", "installctxfunction", "installprotectedctxfunction", "installprotectedctxscanner", "installctxscanner", "resetctxscanner", "cldprocessfile", "cldloadfile", "cldloadviafile", "cldcontext", "cldcommand", "carryoverpar", "freezeparagraphproperties", "defrostparagraphproperties", "setparagraphfreezing", "forgetparagraphfreezing", "updateparagraphproperties", "updateparagraphpenalties", "updateparagraphdemerits", "updateparagraphshapes", "updateparagraphlines", "lastlinewidth", "assumelongusagecs", "Umathbotaccent", "Umathtopaccent", "righttolefthbox", "lefttorighthbox", "righttoleftvbox", "lefttorightvbox", "righttoleftvtop", "lefttorightvtop", "rtlhbox", "ltrhbox", "rtlvbox", "ltrvbox", "rtlvtop", "ltrvtop", "autodirhbox", "autodirvbox", "autodirvtop", "leftorrighthbox", "leftorrightvbox", "leftorrightvtop", "lefttoright", "righttoleft", "checkedlefttoright", "checkedrighttoleft", "synchronizelayoutdirection", "synchronizedisplaydirection", "synchronizeinlinedirection", "dirlre", "dirrle", "dirlro", "dirrlo", "lesshyphens", "morehyphens", "nohyphens", "dohyphens", "dohyphencollapsing", "nohyphencollapsing", "compounddiscretionary", "Ucheckedstartdisplaymath", "Ucheckedstopdisplaymath", "break", "nobreak", "allowbreak", "goodbreak", "nospace", "nospacing", 
"dospacing", "naturalhbox", "naturalvbox", "naturalvtop", "naturalhpack", "naturalvpack", "naturaltpack", "reversehbox", "reversevbox", "reversevtop", "reversehpack", "reversevpack", "reversetpack", "hcontainer", "vcontainer", "tcontainer", "frule", "compoundhyphenpenalty", "start", "stop", "unsupportedcs", "openout", "closeout", "write", "openin", "closein", "read", "readline", "readfromterminal", "boxlines", "boxline", "setboxline", "copyboxline", "boxlinewd", "boxlineht", "boxlinedp", "boxlinenw", "boxlinenh", "boxlinend", "boxlinels", "boxliners", "boxlinelh", "boxlinerh", "boxlinelp", "boxlinerp", "boxlinein", "boxrangewd", "boxrangeht", "boxrangedp", "bitwiseset", "bitwiseand", "bitwiseor", "bitwisexor", "bitwisenot", "bitwisenil", "ifbitwiseand", "bitwise", "bitwiseshift", "bitwiseflip", "textdir", "linedir", "pardir", "boxdir", "prelistbox", "postlistbox", "prelistcopy", "postlistcopy", "setprelistbox", "setpostlistbox", "noligaturing", "nokerning", "noexpansion", "noprotrusion", "noleftkerning", "noleftligaturing", "norightkerning", "norightligaturing", "noitaliccorrection", "futureletnexttoken", "defbackslashbreak", "letbackslashbreak", "pushoverloadmode", "popoverloadmode", "pushrunstate", "poprunstate", "suggestedalias", "showboxhere", "discoptioncodestring", "flagcodestring", "frozenparcodestring", "glyphoptioncodestring", "groupcodestring", "hyphenationcodestring", "mathcontrolcodestring", "mathflattencodestring", "normalizecodestring", "parcontextcodestring", "newlocalcount", "newlocaldimen", "newlocalskip", "newlocalmuskip", "newlocaltoks", "newlocalbox", "newlocalwrite", "newlocalread", "setnewlocalcount", "setnewlocaldimen", "setnewlocalskip", "setnewlocalmuskip", "setnewlocaltoks", "setnewlocalbox", "ifexpression" },
-} \ No newline at end of file
diff --git a/context/data/textadept/context/data/scite-context-data-interfaces.lua b/context/data/textadept/context/data/scite-context-data-interfaces.lua
deleted file mode 100644
index d2b000645..000000000
--- a/context/data/textadept/context/data/scite-context-data-interfaces.lua
+++ /dev/null
@@ -1,4 +0,0 @@
-return {
- ["common"]={ "AEacute", "AEligature", "AEmacron", "AMSTEX", "Aacute", "Abreve", "Abreveacute", "Abrevedotbelow", "Abrevegrave", "Abrevehook", "Abrevetilde", "Acaron", "Acircumflex", "Acircumflexacute", "Acircumflexdotbelow", "Acircumflexgrave", "Acircumflexhook", "Acircumflextilde", "Adiaeresis", "Adiaeresismacron", "Adotaccent", "Adotaccentmacron", "Adotbelow", "Adoublegrave", "AfterPar", "Agrave", "Ahook", "Ainvertedbreve", "Alpha", "Alphabeticnumerals", "AmSTeX", "Amacron", "And", "Angstrom", "Aogonek", "Aring", "Aringacute", "Arrowvert", "Astroke", "Atilde", "BeforePar", "Beta", "Bhook", "Big", "Bigg", "Biggl", "Biggm", "Biggr", "Bigl", "Bigm", "Bigr", "Box", "Bumpeq", "CONTEXT", "Cacute", "Cap", "Caps", "Ccaron", "Ccedilla", "Ccircumflex", "Cdotaccent", "Character", "Characters", "Chi", "Chook", "ConTeXt", "Context", "ConvertConstantAfter", "ConvertToConstant", "Cstroke", "Cup", "DAYLONG", "DAYSHORT", "DZcaronligature", "DZligature", "Dafrican", "Dcaron", "Ddownarrow", "Delta", "Dhook", "Doteq", "Downarrow", "Dstroke", "Dzcaronligature", "Dzligature", "ETEX", "Eacute", "Ebreve", "Ecaron", "Ecedilla", "Ecircumflex", "Ecircumflexacute", "Ecircumflexdotbelow", "Ecircumflexgrave", "Ecircumflexhook", "Ecircumflextilde", "Ediaeresis", "Edotaccent", "Edotbelow", "Edoublegrave", "Egrave", "Ehook", "Einvertedbreve", "Emacron", "Eogonek", "Epsilon", "Eta", "Eth", "Etilde", "Eulerconst", "EveryLine", "EveryPar", "Fhook", "Finv", "Gacute", "Game", "Gamma", "Gbreve", "Gcaron", "Gcircumflex", "Gcommaaccent", "Gdotaccent", "GetPar", "Ghook", "GotoPar", "Greeknumerals", "Gstroke", "Hat", "Hcaron", "Hcircumflex", "Hstroke", "IJligature", "INRSTEX", "Iacute", "Ibreve", "Icaron", "Icircumflex", "Idiaeresis", "Idotaccent", "Idotbelow", "Idoublegrave", "Igrave", "Ihook", "Iinvertedbreve", "Im", "Imacron", "Iogonek", "Iota", "Istroke", "Itilde", "Jcircumflex", "Join", "Kappa", "Kcaron", "Kcommaaccent", "Khook", "LAMSTEX", "LATEX", "LJligature", "LUAJITTEX", "LUAMETATEX", 
"LUATEX", "LaTeX", "Lacute", "LamSTeX", "Lambda", "Lbar", "Lcaron", "Lcommaaccent", "Ldotmiddle", "Ldsh", "Leftarrow", "Leftrightarrow", "Ljligature", "Lleftarrow", "Longleftarrow", "Longleftrightarrow", "Longmapsfrom", "Longmapsto", "Longrightarrow", "Lsh", "Lstroke", "Lua", "LuaMetaTeX", "LuaTeX", "LuajitTeX", "METAFONT", "METAFUN", "METAPOST", "MKII", "MKIV", "MKIX", "MKLX", "MKVI", "MKXI", "MKXL", "MONTH", "MONTHLONG", "MONTHSHORT", "MPII", "MPIV", "MPLX", "MPVI", "MPXL", "MPanchor", "MPbetex", "MPc", "MPcode", "MPcolor", "MPcoloronly", "MPcolumn", "MPd", "MPdrawing", "MPfontsizehskip", "MPgetmultipars", "MPgetmultishape", "MPgetposboxes", "MPh", "MPinclusions", "MPleftskip", "MPll", "MPlr", "MPls", "MPmenubuttons", "MPn", "MPoptions", "MPoverlayanchor", "MPp", "MPpage", "MPpardata", "MPplus", "MPpos", "MPpositiongraphic", "MPposset", "MPr", "MPrawvar", "MPregion", "MPrest", "MPrightskip", "MPrs", "MPstring", "MPtext", "MPtransparency", "MPul", "MPur", "MPv", "MPvar", "MPvariable", "MPvv", "MPw", "MPwhd", "MPx", "MPxy", "MPxywhd", "MPy", "Mapsfrom", "Mapsto", "MetaFont", "MetaFun", "MetaPost", "Mu", "NJligature", "Nacute", "Ncaron", "Ncommaaccent", "Nearrow", "Neng", "Ngrave", "Njligature", "NormalizeFontHeight", "NormalizeFontWidth", "NormalizeTextHeight", "NormalizeTextWidth", "Ntilde", "Nu", "Numbers", "Nwarrow", "OEligature", "Oacute", "Obreve", "Ocaron", "Ocircumflex", "Ocircumflexacute", "Ocircumflexdotbelow", "Ocircumflexgrave", "Ocircumflexhook", "Ocircumflextilde", "Odiaeresis", "Odiaeresismacron", "Odotaccent", "Odotaccentmacron", "Odotbelow", "Odoublegrave", "Ograve", "Ohook", "Ohorn", "Ohornacute", "Ohorndotbelow", "Ohorngrave", "Ohornhook", "Ohorntilde", "Ohungarumlaut", "Oinvertedbreve", "Omacron", "Omega", "Omicron", "Oogonek", "Oogonekmacron", "Ostroke", "Ostrokeacute", "Otilde", "Otildemacron", "P", "PDFETEX", "PDFTEX", "PDFcolor", "PICTEX", "PPCHTEX", "PPCHTeX", "PRAGMA", "Phi", "Phook", "Pi", "PiCTeX", "Plankconst", "PointsToBigPoints", 
"PointsToReal", "PointsToWholeBigPoints", "PropertyLine", "Psi", "PtToCm", "Racute", "Rcaron", "Rcommaaccent", "Rdoublegrave", "Rdsh", "Re", "ReadFile", "Relbar", "Rho", "Rightarrow", "Rinvertedbreve", "Romannumerals", "Rrightarrow", "Rsh", "S", "Sacute", "ScaledPointsToBigPoints", "ScaledPointsToWholeBigPoints", "Scaron", "Scedilla", "Schwa", "Scircumflex", "Scommaaccent", "Searrow", "Sigma", "Smallcapped", "Subset", "Supset", "Swarrow", "TABLE", "TEX", "TaBlE", "Tau", "Tcaron", "Tcedilla", "Tcommaaccent", "TeX", "TheNormalizedFontSize", "Theta", "Thook", "Thorn", "TransparencyHack", "Tstroke", "Uacute", "Ubreve", "Ucaron", "Ucircumflex", "Udiaeresis", "Udiaeresisacute", "Udiaeresiscaron", "Udiaeresisgrave", "Udiaeresismacron", "Udotbelow", "Udoublegrave", "Ugrave", "Uhook", "Uhorn", "Uhornacute", "Uhorndotbelow", "Uhorngrave", "Uhornhook", "Uhorntilde", "Uhungarumlaut", "Uinvertedbreve", "Umacron", "Uogonek", "Uparrow", "Updownarrow", "Upsilon", "Uring", "Utilde", "Uuparrow", "VDash", "Vdash", "VerboseNumber", "Vert", "Vhook", "Vvdash", "WEEKDAY", "WORD", "WORDS", "Wcircumflex", "WidthSpanningText", "Word", "Words", "XETEX", "XeTeX", "Xi", "Yacute", "Ycircumflex", "Ydiaeresis", "Ydotbelow", "Ygrave", "Yhook", "Ymacron", "Ytilde", "Zacute", "Zcaron", "Zdotaccent", "Zeta", "Zhook", "Zstroke", "aacute", "abbreviation", "abjadnaivenumerals", "abjadnodotnumerals", "abjadnumerals", "about", "abreve", "abreveacute", "abrevedotbelow", "abrevegrave", "abrevehook", "abrevetilde", "acaron", "acircumflex", "acircumflexacute", "acircumflexdotbelow", "acircumflexgrave", "acircumflexhook", "acircumflextilde", "activatespacehandler", "actualday", "actualmonth", "actualyear", "actuarial", "acute", "acwopencirclearrow", "adaptcollector", "adaptfontfeature", "adaptlayout", "adaptpapersize", "addfeature", "addtoJSpreamble", "addtocommalist", "addvalue", "adiaeresis", "adiaeresismacron", "adotaccent", "adotaccentmacron", "adotbelow", "adoublegrave", "aeacute", "aeligature", 
"aemacron", "afghanicurrency", "aftersplitstring", "aftertestandsplitstring", "agrave", "ahook", "ainvertedbreve", "aleph", "alignbottom", "aligned", "alignedbox", "alignedline", "alignhere", "alignmentcharacter", "allinputpaths", "alpha", "alphabeticnumerals", "alwayscitation", "alwayscite", "amacron", "amalg", "ampersand", "anchor", "angle", "aogonek", "appendetoks", "appendgvalue", "appendtocommalist", "appendtoks", "appendtoksonce", "appendvalue", "apply", "applyalternativestyle", "applyprocessor", "applytocharacters", "applytofirstcharacter", "applytosplitstringchar", "applytosplitstringcharspaced", "applytosplitstringline", "applytosplitstringlinespaced", "applytosplitstringword", "applytosplitstringwordspaced", "applytowords", "approx", "approxEq", "approxeq", "approxnEq", "arabicakbar", "arabicalayhe", "arabicallah", "arabicallallahou", "arabicasterisk", "arabicbasmalah", "arabiccomma", "arabiccuberoot", "arabicdateseparator", "arabicdecimals", "arabicdisputedendofayah", "arabicendofayah", "arabicexnumerals", "arabicfootnotemarker", "arabicfourthroot", "arabichighain", "arabichighalayheassallam", "arabichigheqala", "arabichighesala", "arabichighfootnotemarker", "arabichighjeem", "arabichighlamalef", "arabichighmadda", "arabichighmeemlong", "arabichighmeemshort", "arabichighnisf", "arabichighnoon", "arabichighnoonkasra", "arabichighqaf", "arabichighqif", "arabichighradiallahouanhu", "arabichighrahmatullahalayhe", "arabichighrubc", "arabichighsad", "arabichighsajda", "arabichighsakta", "arabichighsallallahou", "arabichighseen", "arabichighsmallsafha", "arabichightah", "arabichightakhallus", "arabichighthalatha", "arabichighwaqf", "arabichighyeh", "arabichighzain", "arabicjallajalalouhou", "arabiclettermark", "arabiclowmeemlong", "arabiclownoonkasra", "arabiclowseen", "arabicmisra", "arabicmuhammad", "arabicnumber", "arabicnumberabove", "arabicnumerals", "arabicparenleft", "arabicparenright", "arabicpercent", "arabicperiod", "arabicpermille", 
"arabicpertenthousand", "arabicpoeticverse", "arabicqala", "arabicquestion", "arabicrasoul", "arabicray", "arabicrialsign", "arabicsafha", "arabicsajdah", "arabicsalla", "arabicsamvat", "arabicsanah", "arabicsemicolon", "arabicshighthreedots", "arabicslcm", "arabicstartofrubc", "arabictripledot", "arabicvowelwaw", "arabicvowelyeh", "arabicwasallam", "arg", "aring", "aringacute", "arrowvert", "asciimode", "asciistr", "aside", "assignalfadimension", "assigndimen", "assigndimension", "assignifempty", "assigntranslation", "assignvalue", "assignwidth", "assumelongusagecs", "ast", "astype", "asymp", "at", "atilde", "atleftmargin", "atpage", "atrightmargin", "attachment", "autocap", "autodirhbox", "autodirvbox", "autodirvtop", "autoinsertnextspace", "autointegral", "automathematics", "autoorientation", "autopagestaterealpage", "autopagestaterealpageorder", "autorule", "autosetups", "availablehsize", "averagecharwidth", "backepsilon", "background", "backgroundimage", "backgroundimagefill", "backgroundline", "backprime", "backsim", "backslash", "bar", "barleftarrow", "barleftarrowrightarrowbar", "barovernorthwestarrow", "barwedge", "basegrid", "baselinebottom", "baselineleftbox", "baselinemiddlebox", "baselinerightbox", "bbordermatrix", "bbox", "because", "beforesplitstring", "beforetestandsplitstring", "beta", "beth", "between", "bhook", "big", "bigbodyfont", "bigcap", "bigcirc", "bigcircle", "bigcup", "bigdiamond", "bigg", "bigger", "biggl", "biggm", "biggr", "bigl", "bigm", "bigodot", "bigoplus", "bigotimes", "bigr", "bigskip", "bigsqcap", "bigsqcup", "bigsquare", "bigstar", "bigtimes", "bigtriangledown", "bigtriangleup", "bigudot", "biguplus", "bigvee", "bigwedge", "binom", "bitmapimage", "blacklozenge", "blackrule", "blackrules", "blacksquare", "blacktriangle", "blacktriangledown", "blacktriangleleft", "blacktriangleright", "blank", "blap", "bleed", "bleedheight", "bleedwidth", "blockligatures", "blockquote", "blocksynctexfile", "blockuservariable", 
"bodyfontenvironmentlist", "bodyfontsize", "bold", "boldface", "bolditalic", "boldslanted", "bookmark", "booleanmodevalue", "bordermatrix", "bot", "bottombox", "bottomleftbox", "bottomrightbox", "bowtie", "boxcursor", "boxdot", "boxmarker", "boxminus", "boxofsize", "boxplus", "boxreference", "boxtimes", "bpos", "breakablethinspace", "breakhere", "breve", "bstroke", "btxabbreviatedjournal", "btxaddjournal", "btxalwayscitation", "btxauthorfield", "btxdetail", "btxdirect", "btxdoif", "btxdoifcombiinlistelse", "btxdoifelse", "btxdoifelsecombiinlist", "btxdoifelsesameasprevious", "btxdoifelsesameaspreviouschecked", "btxdoifelseuservariable", "btxdoifnot", "btxdoifsameaspreviouscheckedelse", "btxdoifsameaspreviouselse", "btxdoifuservariableelse", "btxexpandedjournal", "btxfield", "btxfieldname", "btxfieldtype", "btxfirstofrange", "btxflush", "btxflushauthor", "btxflushauthorinverted", "btxflushauthorinvertedshort", "btxflushauthorname", "btxflushauthornormal", "btxflushauthornormalshort", "btxflushsuffix", "btxfoundname", "btxfoundtype", "btxhiddencitation", "btxhybridcite", "btxlabellanguage", "btxlabeltext", "btxlistcitation", "btxloadjournalist", "btxoneorrange", "btxremapauthor", "btxsavejournalist", "btxsetup", "btxsingularorplural", "btxsingularplural", "btxtextcitation", "buildmathaccent", "buildtextaccent", "buildtextbottomcomma", "buildtextbottomdot", "buildtextcedilla", "buildtextgrave", "buildtextmacron", "buildtextognek", "bullet", "button", "cacute", "calligraphic", "camel", "cap", "capital", "carriagereturn", "catcodetablename", "cbox", "ccaron", "ccedilla", "ccircumflex", "ccurl", "cdot", "cdotaccent", "cdotp", "cdots", "centeraligned", "centerbox", "centerdot", "centeredbox", "centeredlastline", "centerednextbox", "centerline", "cfrac", "chapter", "character", "characters", "chardescription", "charwidthlanguage", "check", "checkcharacteralign", "checkedblank", "checkedchar", "checkedfiller", "checkedstrippedcsname", "checkinjector", "checkmark", 
"checknextindentation", "checknextinjector", "checkpage", "checkparameters", "checkpreviousinjector", "checksoundtrack", "checktwopassdata", "checkvariables", "chem", "chemical", "chemicalbottext", "chemicalmidtext", "chemicalsymbol", "chemicaltext", "chemicaltoptext", "chi", "chineseallnumerals", "chinesecapnumerals", "chinesenumerals", "chook", "circ", "circeq", "circlearrowleft", "circlearrowright", "circledR", "circledS", "circledast", "circledcirc", "circleddash", "circledequals", "circleonrightarrow", "citation", "cite", "clap", "classfont", "cldcommand", "cldcontext", "cldloadfile", "cldprocessfile", "cleftarrow", "clip", "clippedoverlayimage", "clonefield", "clubsuit", "collect", "collectedtext", "collectexpanded", "colon", "coloncolonequals", "colonequals", "color", "colorbar", "colorcomponents", "colored", "coloronly", "colorvalue", "column", "columnbreak", "columnsetspanwidth", "combinepages", "commalistelement", "commalistsentence", "commalistsize", "comment", "comparecolorgroup", "comparedimension", "comparedimensioneps", "comparepalet", "complement", "completebtxrendering", "completecontent", "completeindex", "completelist", "completelistofabbreviations", "completelistofchemicals", "completelistoffigures", "completelistofgraphics", "completelistofintermezzi", "completelistoflogos", "completelistofpublications", "completelistofsorts", "completelistofsynonyms", "completelistoftables", "completepagenumber", "completeregister", "complexes", "complexorsimple", "complexorsimpleempty", "component", "composedcollector", "composedlayer", "compounddiscretionary", "compresult", "cong", "constantdimen", "constantdimenargument", "constantemptyargument", "constantnumber", "constantnumberargument", "contentreference", "continuednumber", "continueifinputfile", "convertargument", "convertcommand", "convertedcounter", "converteddimen", "convertedsubcounter", "convertmonth", "convertnumber", "convertvalue", "convertvboxtohbox", "coprod", "copyboxfromcache", 
"copybtxlabeltext", "copyfield", "copyheadtext", "copylabeltext", "copymathlabeltext", "copyoperatortext", "copypages", "copyparameters", "copyposition", "copyprefixtext", "copyright", "copysetups", "copysuffixtext", "copytaglabeltext", "copyunittext", "correctwhitespace", "countersubs", "counttoken", "counttokens", "cramped", "crampedclap", "crampedllap", "crampedrlap", "crightarrow", "crightoverleftarrow", "crlf", "crlfplaceholder", "cstroke", "ctop", "ctxcommand", "ctxdirectcommand", "ctxdirectlua", "ctxfunction", "ctxlatecommand", "ctxlatelua", "ctxloadluafile", "ctxlua", "ctxluabuffer", "ctxluacode", "ctxreport", "ctxsprint", "cup", "curlyeqprec", "curlyeqsucc", "curlyvee", "curlywedge", "currentassignmentlistkey", "currentassignmentlistvalue", "currentbtxuservariable", "currentcommalistitem", "currentcomponent", "currentdate", "currentenvironment", "currentfeaturetest", "currentheadnumber", "currentinterface", "currentlanguage", "currentlistentrydestinationattribute", "currentlistentrylimitedtext", "currentlistentrynumber", "currentlistentrypagenumber", "currentlistentryreferenceattribute", "currentlistentrytitle", "currentlistentrytitlerendered", "currentlistsymbol", "currentmainlanguage", "currentmessagetext", "currentmoduleparameter", "currentoutputstream", "currentproduct", "currentproject", "currentregime", "currentregisterpageuserdata", "currentresponses", "currenttime", "currentvalue", "currentxtablecolumn", "currentxtablerow", "curvearrowleft", "curvearrowright", "cwopencirclearrow", "cyrillicA", "cyrillicAE", "cyrillicAbreve", "cyrillicAdiaeresis", "cyrillicB", "cyrillicBIGYUS", "cyrillicBIGYUSiotified", "cyrillicC", "cyrillicCH", "cyrillicCHEDC", "cyrillicCHEDCabkhasian", "cyrillicCHEabkhasian", "cyrillicCHEdiaeresis", "cyrillicCHEkhakassian", "cyrillicCHEvertstroke", "cyrillicD", "cyrillicDASIAPNEUMATA", "cyrillicDJE", "cyrillicDZE", "cyrillicDZEabkhasian", "cyrillicDZHE", "cyrillicE", "cyrillicELtail", "cyrillicEMtail", "cyrillicENDC", 
"cyrillicENGHE", "cyrillicENhook", "cyrillicENtail", "cyrillicEREV", "cyrillicERY", "cyrillicERtick", "cyrillicEbreve", "cyrillicEdiaeresis", "cyrillicEgrave", "cyrillicEiotified", "cyrillicF", "cyrillicFITA", "cyrillicG", "cyrillicGHEmidhook", "cyrillicGHEstroke", "cyrillicGHEupturn", "cyrillicGJE", "cyrillicH", "cyrillicHA", "cyrillicHADC", "cyrillicHRDSN", "cyrillicI", "cyrillicIE", "cyrillicII", "cyrillicISHRT", "cyrillicISHRTtail", "cyrillicIZHITSA", "cyrillicIZHITSAdoublegrave", "cyrillicIdiaeresis", "cyrillicIgrave", "cyrillicImacron", "cyrillicJE", "cyrillicK", "cyrillicKADC", "cyrillicKAbashkir", "cyrillicKAhook", "cyrillicKAstroke", "cyrillicKAvertstroke", "cyrillicKJE", "cyrillicKOPPA", "cyrillicKSI", "cyrillicL", "cyrillicLITTLEYUS", "cyrillicLITTLEYUSiotified", "cyrillicLJE", "cyrillicM", "cyrillicN", "cyrillicNJE", "cyrillicO", "cyrillicOMEGA", "cyrillicOMEGAround", "cyrillicOMEGAtitlo", "cyrillicOT", "cyrillicObarred", "cyrillicObarreddiaeresis", "cyrillicOdiaeresis", "cyrillicP", "cyrillicPALATALIZATION", "cyrillicPALOCHKA", "cyrillicPEmidhook", "cyrillicPSI", "cyrillicPSILIPNEUMATA", "cyrillicR", "cyrillicS", "cyrillicSCHWA", "cyrillicSCHWAdiaeresis", "cyrillicSDSC", "cyrillicSEMISOFT", "cyrillicSFTSN", "cyrillicSH", "cyrillicSHCH", "cyrillicSHHA", "cyrillicT", "cyrillicTEDC", "cyrillicTETSE", "cyrillicTITLO", "cyrillicTSHE", "cyrillicU", "cyrillicUK", "cyrillicUSHRT", "cyrillicUdiaeresis", "cyrillicUdoubleacute", "cyrillicUmacron", "cyrillicV", "cyrillicYA", "cyrillicYAT", "cyrillicYERUdiaeresis", "cyrillicYI", "cyrillicYO", "cyrillicYU", "cyrillicYstr", "cyrillicYstrstroke", "cyrillicZ", "cyrillicZDSC", "cyrillicZEdiaeresis", "cyrillicZH", "cyrillicZHEbreve", "cyrillicZHEdescender", "cyrillicZHEdiaeresis", "cyrillica", "cyrillicabreve", "cyrillicadiaeresis", "cyrillicae", "cyrillicb", "cyrillicbigyus", "cyrillicbigyusiotified", "cyrillicc", "cyrillicch", "cyrilliccheabkhasian", "cyrillicchedc", "cyrillicchedcabkhasian", "cyrillicchediaeresis", 
"cyrillicchekhakassian", "cyrillicchevertstroke", "cyrillicd", "cyrillicdje", "cyrillicdze", "cyrillicdzeabkhasian", "cyrillicdzhe", "cyrillice", "cyrillicebreve", "cyrillicediaeresis", "cyrillicegrave", "cyrilliceiotified", "cyrilliceltail", "cyrillicemtail", "cyrillicendc", "cyrillicenghe", "cyrillicenhook", "cyrillicentail", "cyrillicerev", "cyrillicertick", "cyrillicery", "cyrillicf", "cyrillicfita", "cyrillicg", "cyrillicghemidhook", "cyrillicghestroke", "cyrillicgheupturn", "cyrillicgje", "cyrillich", "cyrillicha", "cyrillichadc", "cyrillichrdsn", "cyrillici", "cyrillicidiaeresis", "cyrillicie", "cyrillicigrave", "cyrillicii", "cyrillicimacron", "cyrillicishrt", "cyrillicishrttail", "cyrillicizhitsa", "cyrillicizhitsadoublegrave", "cyrillicje", "cyrillick", "cyrillickabashkir", "cyrillickadc", "cyrillickahook", "cyrillickastroke", "cyrillickavertstroke", "cyrillickje", "cyrillickoppa", "cyrillicksi", "cyrillicl", "cyrilliclittleyus", "cyrilliclittleyusiotified", "cyrilliclje", "cyrillicm", "cyrillicn", "cyrillicnje", "cyrillico", "cyrillicobarred", "cyrillicobarreddiaeresis", "cyrillicodiaeresis", "cyrillicomega", "cyrillicomegaround", "cyrillicomegatitlo", "cyrillicot", "cyrillicp", "cyrillicpemidhook", "cyrillicpsi", "cyrillicr", "cyrillics", "cyrillicschwa", "cyrillicschwadiaeresis", "cyrillicsdsc", "cyrillicsemisoft", "cyrillicsftsn", "cyrillicsh", "cyrillicshch", "cyrillicshha", "cyrillict", "cyrillictedc", "cyrillictetse", "cyrillictshe", "cyrillicu", "cyrillicudiaeresis", "cyrillicudoubleacute", "cyrillicuk", "cyrillicumacron", "cyrillicushrt", "cyrillicv", "cyrillicya", "cyrillicyat", "cyrillicyerudiaeresis", "cyrillicyi", "cyrillicyo", "cyrillicystr", "cyrillicystrstroke", "cyrillicyu", "cyrillicz", "cyrilliczdsc", "cyrilliczediaeresis", "cyrilliczh", "cyrilliczhebreve", "cyrilliczhedescender", "cyrilliczhediaeresis", "d", "dag", "dagger", "daleth", "dasharrow", "dashedleftarrow", "dashedrightarrow", "dashv", "datasetvariable", "date", "daylong", 
"dayoftheweek", "dayshort", "dayspermonth", "dbinom", "dcaron", "dcurl", "ddag", "ddagger", "dddot", "ddot", "ddots", "decrement", "decrementcounter", "decrementedcounter", "decrementpagenumber", "decrementsubpagenumber", "decrementvalue", "defaultinterface", "defaultobjectpage", "defaultobjectreference", "defcatcodecommand", "defconvertedargument", "defconvertedcommand", "defconvertedvalue", "define", "defineMPinstance", "defineTABLEsetup", "defineaccent", "defineactivecharacter", "definealternativestyle", "defineanchor", "defineattachment", "defineattribute", "definebackground", "definebar", "defineblock", "definebodyfont", "definebodyfontenvironment", "definebodyfontswitch", "definebreakpoint", "definebreakpoints", "definebtx", "definebtxdataset", "definebtxregister", "definebtxrendering", "definebuffer", "definebutton", "definecapitals", "definecharacter", "definecharacterkerning", "definecharacterspacing", "definechemical", "definechemicals", "definechemicalsymbol", "definecollector", "definecolor", "definecolorgroup", "definecolumnbreak", "definecolumnset", "definecolumnsetarea", "definecolumnsetspan", "definecombination", "definecombinedlist", "definecommand", "definecomment", "definecomplexorsimple", "definecomplexorsimpleempty", "defineconversion", "defineconversionset", "definecounter", "definedataset", "definedate", "definedelimitedtext", "definedeq", "definedescription", "definedfont", "definedocument", "defineeffect", "defineenumeration", "defineexpandable", "defineexpansion", "defineexternalfigure", "definefacingfloat", "definefallbackfamily", "definefield", "definefieldbody", "definefieldbodyset", "definefieldcategory", "definefieldstack", "definefiguresymbol", "definefileconstant", "definefilefallback", "definefilesynonym", "definefiller", "definefirstline", "definefittingpage", "definefloat", "definefont", "definefontalternative", "definefontfallback", "definefontfamily", "definefontfamilypreset", "definefontfeature", "definefontfile", 
"definefontsize", "definefontsolution", "definefontstyle", "definefontsynonym", "defineformula", "defineformulaalternative", "defineformulaframed", "defineframed", "defineframedcontent", "defineframedtable", "defineframedtext", "definefrozenfont", "defineglobalcolor", "definegraphictypesynonym", "definegridsnapping", "definehbox", "definehead", "defineheadalternative", "definehelp", "definehigh", "definehighlight", "definehspace", "definehyphenationfeatures", "defineindentedtext", "defineindenting", "defineinitial", "defineinsertion", "defineinteraction", "defineinteractionbar", "defineinteractionmenu", "defineinterfaceconstant", "defineinterfaceelement", "defineinterfacevariable", "defineinterlinespace", "defineintermediatecolor", "defineitemgroup", "defineitems", "definelabel", "definelabelclass", "definelayer", "definelayerpreset", "definelayout", "definelinefiller", "definelinenote", "definelinenumbering", "definelines", "definelist", "definelistalternative", "definelistextra", "definelow", "definelowhigh", "definelowmidhigh", "definemakeup", "definemarginblock", "definemargindata", "definemarker", "definemarking", "definemathaccent", "definemathalignment", "definemathcases", "definemathcommand", "definemathdouble", "definemathdoubleextensible", "definemathematics", "definemathextensible", "definemathfence", "definemathfraction", "definemathframed", "definemathmatrix", "definemathornament", "definemathover", "definemathoverextensible", "definemathovertextextensible", "definemathradical", "definemathstackers", "definemathstyle", "definemathtriplet", "definemathunder", "definemathunderextensible", "definemathundertextextensible", "definemathunstacked", "definemeasure", "definemessageconstant", "definemixedcolumns", "definemode", "definemulticolumns", "definemultitonecolor", "definenamedcolor", "definenamespace", "definenarrower", "definenote", "defineorientation", "defineornament", "defineoutputroutine", "defineoutputroutinecommand", "defineoverlay", 
"definepage", "definepagebreak", "definepagechecker", "definepagecolumns", "definepageinjection", "definepageinjectionalternative", "definepageshift", "definepagestate", "definepairedbox", "definepalet", "definepapersize", "defineparagraph", "defineparagraphs", "defineparallel", "defineparbuilder", "defineperiodkerning", "defineplaceholder", "defineplacement", "definepositioning", "defineprefixset", "defineprocesscolor", "defineprocessor", "defineprofile", "defineprogram", "definepushbutton", "definepushsymbol", "definereference", "definereferenceformat", "defineregister", "definerenderingwindow", "defineresetset", "defineruby", "definescale", "definescript", "definesection", "definesectionblock", "definesectionlevels", "defineselector", "defineseparatorset", "defineshift", "definesidebar", "definesort", "definesorting", "definespotcolor", "definestartstop", "definestyle", "definestyleinstance", "definesubfield", "definesubformula", "definesymbol", "definesynonym", "definesynonyms", "definesystemattribute", "definesystemconstant", "definesystemvariable", "definetabletemplate", "definetabulate", "definetext", "definetextbackground", "definetextflow", "definetextnote", "definetokenlist", "definetooltip", "definetransparency", "definetwopasslist", "definetype", "definetypeface", "definetypescriptprefix", "definetypescriptsynonym", "definetypesetting", "definetyping", "defineunit", "defineuserdata", "defineuserdataalternative", "defineviewerlayer", "definevspace", "definevspacing", "definevspacingamount", "definextable", "defrostparagraphproperties", "delimited", "delimitedtext", "delta", "depthofstring", "depthonlybox", "depthspanningtext", "depthstrut", "determineheadnumber", "determinelistcharacteristics", "determinenoflines", "determineregistercharacteristics", "devanagarinumerals", "dfrac", "dhook", "diameter", "diamond", "diamondsuit", "differentialD", "differentiald", "digamma", "digits", "dimensiontocount", "directboxfromcache", "directcolor", "directcolored", 
"directconvertedcounter", "directcopyboxfromcache", "directdummyparameter", "directgetboxllx", "directgetboxlly", "directhighlight", "directlocalframed", "directluacode", "directparwrapper", "directselect", "directsetbar", "directsetup", "directsymbol", "directvspacing", "dis", "disabledirectives", "disableexperiments", "disablemode", "disableoutputstream", "disableparpositions", "disableregime", "disabletrackers", "displaymath", "displaymathematics", "displaymessage", "disposeluatable", "distributedhsize", "div", "dividedsize", "divideontimes", "divides", "doadaptleftskip", "doadaptrightskip", "doaddfeature", "doassign", "doassignempty", "doboundtext", "docheckassignment", "docheckedpair", "documentvariable", "dodoubleargument", "dodoubleargumentwithset", "dodoubleempty", "dodoubleemptywithset", "dodoublegroupempty", "doeassign", "doexpandedrecurse", "dofastloopcs", "dogetattribute", "dogetattributeid", "dogetcommacommandelement", "dogobbledoubleempty", "dogobblesingleempty", "dohyphens", "doif", "doifMPgraphicelse", "doifallcommon", "doifallcommonelse", "doifalldefinedelse", "doifallmodes", "doifallmodeselse", "doifassignmentelse", "doifassignmentelsecs", "doifblackelse", "doifbothsides", "doifbothsidesoverruled", "doifboxelse", "doifbufferelse", "doifcheckedpagestate", "doifcolor", "doifcolorelse", "doifcommandhandler", "doifcommandhandlerelse", "doifcommon", "doifcommonelse", "doifcontent", "doifconversiondefinedelse", "doifconversionnumberelse", "doifcounter", "doifcounterelse", "doifcurrentfonthasfeatureelse", "doifdefined", "doifdefinedcounter", "doifdefinedcounterelse", "doifdefinedelse", "doifdimensionelse", "doifdimenstringelse", "doifdocumentargument", "doifdocumentargumentelse", "doifdocumentfilename", "doifdocumentfilenameelse", "doifdocumentvariable", "doifdocumentvariableelse", "doifdrawingblackelse", "doifelse", "doifelseMPgraphic", "doifelseallcommon", "doifelsealldefined", "doifelseallmodes", "doifelseassignment", "doifelseassignmentcs", 
"doifelseblack", "doifelsebox", "doifelseboxincache", "doifelsebuffer", "doifelsecolor", "doifelsecommandhandler", "doifelsecommon", "doifelseconversiondefined", "doifelseconversionnumber", "doifelsecounter", "doifelsecurrentfonthasfeature", "doifelsecurrentsortingused", "doifelsecurrentsynonymshown", "doifelsecurrentsynonymused", "doifelsedefined", "doifelsedefinedcounter", "doifelsedimension", "doifelsedimenstring", "doifelsedocumentargument", "doifelsedocumentfilename", "doifelsedocumentvariable", "doifelsedrawingblack", "doifelseempty", "doifelseemptyvalue", "doifelseemptyvariable", "doifelseenv", "doifelsefastoptionalcheck", "doifelsefastoptionalcheckcs", "doifelsefieldbody", "doifelsefieldcategory", "doifelsefigure", "doifelsefile", "doifelsefiledefined", "doifelsefileexists", "doifelsefirstchar", "doifelseflagged", "doifelsefontchar", "doifelsefontfeature", "doifelsefontpresent", "doifelsefontsynonym", "doifelseframed", "doifelsehasspace", "doifelsehelp", "doifelseincsname", "doifelseindented", "doifelseinelement", "doifelseinputfile", "doifelseinsertion", "doifelseinset", "doifelseinstring", "doifelseinsymbolset", "doifelseintoks", "doifelseintwopassdata", "doifelseitalic", "doifelselanguage", "doifelselayerdata", "doifelselayoutdefined", "doifelselayoutsomeline", "doifelselayouttextline", "doifelseleapyear", "doifelselist", "doifelselocation", "doifelselocfile", "doifelsemainfloatbody", "doifelsemarkedcontent", "doifelsemarkedpage", "doifelsemarking", "doifelsemeaning", "doifelsemessage", "doifelsemode", "doifelsenextbgroup", "doifelsenextbgroupcs", "doifelsenextchar", "doifelsenextoptional", "doifelsenextoptionalcs", "doifelsenextparenthesis", "doifelsenonzeropositive", "doifelsenoteonsamepage", "doifelsenothing", "doifelsenumber", "doifelseobjectfound", "doifelseobjectreferencefound", "doifelseoddpage", "doifelseoddpagefloat", "doifelseoldercontext", "doifelseolderversion", "doifelseorientation", "doifelseoverlapping", "doifelseoverlay", 
"doifelseparallel", "doifelseparentfile", "doifelseparwrapper", "doifelsepath", "doifelsepathexists", "doifelsepatterns", "doifelseposition", "doifelsepositionaction", "doifelsepositiononpage", "doifelsepositionsonsamepage", "doifelsepositionsonthispage", "doifelsepositionsused", "doifelsereferencefound", "doifelserightpage", "doifelserightpagefloat", "doifelserighttoleftinbox", "doifelsesamelinereference", "doifelsesamestring", "doifelsesetups", "doifelsesomebackground", "doifelsesomespace", "doifelsesomething", "doifelsesometoks", "doifelsestringinstring", "doifelsestructurelisthasnumber", "doifelsestructurelisthaspage", "doifelsesymboldefined", "doifelsesymbolset", "doifelsetext", "doifelsetextflow", "doifelsetextflowcollector", "doifelsetopofpage", "doifelsetypingfile", "doifelseundefined", "doifelseurldefined", "doifelsevalue", "doifelsevaluenothing", "doifelsevariable", "doifempty", "doifemptyelse", "doifemptytoks", "doifemptyvalue", "doifemptyvalueelse", "doifemptyvariable", "doifemptyvariableelse", "doifenv", "doifenvelse", "doiffastoptionalcheckcselse", "doiffastoptionalcheckelse", "doiffieldbodyelse", "doiffieldcategoryelse", "doiffigureelse", "doiffile", "doiffiledefinedelse", "doiffileelse", "doiffileexistselse", "doiffirstcharelse", "doifflaggedelse", "doiffontcharelse", "doiffontfeatureelse", "doiffontpresentelse", "doiffontsynonymelse", "doifhasspaceelse", "doifhelpelse", "doifincsnameelse", "doifinelementelse", "doifinputfileelse", "doifinsertionelse", "doifinset", "doifinsetelse", "doifinstring", "doifinstringelse", "doifinsymbolset", "doifinsymbolsetelse", "doifintokselse", "doifintwopassdataelse", "doifitalicelse", "doiflanguageelse", "doiflayerdataelse", "doiflayoutdefinedelse", "doiflayoutsomelineelse", "doiflayouttextlineelse", "doifleapyearelse", "doiflistelse", "doiflocationelse", "doiflocfileelse", "doifmainfloatbodyelse", "doifmarkingelse", "doifmeaningelse", "doifmessageelse", "doifmode", "doifmodeelse", "doifnextbgroupcselse", 
"doifnextbgroupelse", "doifnextcharelse", "doifnextoptionalcselse", "doifnextoptionalelse", "doifnextparenthesiselse", "doifnonzeropositiveelse", "doifnot", "doifnotallcommon", "doifnotallmodes", "doifnotcommandhandler", "doifnotcommon", "doifnotcounter", "doifnotdocumentargument", "doifnotdocumentfilename", "doifnotdocumentvariable", "doifnotempty", "doifnotemptyvalue", "doifnotemptyvariable", "doifnotenv", "doifnoteonsamepageelse", "doifnotescollected", "doifnotfile", "doifnotflagged", "doifnothing", "doifnothingelse", "doifnotinset", "doifnotinsidesplitfloat", "doifnotinstring", "doifnotmode", "doifnotnumber", "doifnotsamestring", "doifnotsetups", "doifnotvalue", "doifnotvariable", "doifnumber", "doifnumberelse", "doifobjectfoundelse", "doifobjectreferencefoundelse", "doifoddpageelse", "doifoddpagefloatelse", "doifoldercontextelse", "doifolderversionelse", "doifoutervmode", "doifoverlappingelse", "doifoverlayelse", "doifparallelelse", "doifparentfileelse", "doifpathelse", "doifpathexistselse", "doifpatternselse", "doifposition", "doifpositionaction", "doifpositionactionelse", "doifpositionelse", "doifpositiononpageelse", "doifpositionsonsamepageelse", "doifpositionsonthispageelse", "doifpositionsusedelse", "doifreferencefoundelse", "doifrightpageelse", "doifrightpagefloatelse", "doifrighttoleftinboxelse", "doifsamelinereferenceelse", "doifsamestring", "doifsamestringelse", "doifsetups", "doifsetupselse", "doifsomebackground", "doifsomebackgroundelse", "doifsomespaceelse", "doifsomething", "doifsomethingelse", "doifsometoks", "doifsometokselse", "doifstringinstringelse", "doifstructurelisthasnumberelse", "doifstructurelisthaspageelse", "doifsymboldefinedelse", "doifsymbolsetelse", "doiftext", "doiftextelse", "doiftextflowcollectorelse", "doiftextflowelse", "doiftopofpageelse", "doiftypingfileelse", "doifundefined", "doifundefinedcounter", "doifundefinedelse", "doifunknownfontfeature", "doifurldefinedelse", "doifvalue", "doifvalueelse", "doifvaluenothing", 
"doifvaluenothingelse", "doifvaluesomething", "doifvariable", "doifvariableelse", "doindentation", "dollar", "doloop", "doloopoverlist", "donothing", "dontconvertfont", "dontleavehmode", "dontpermitspacesbetweengroups", "dopositionaction", "doprocesslocalsetups", "doquadrupleargument", "doquadrupleempty", "doquadruplegroupempty", "doquintupleargument", "doquintupleempty", "doquintuplegroupempty", "dorechecknextindentation", "dorecurse", "dorepeatwithcommand", "doreplacefeature", "doresetandafffeature", "doresetattribute", "dorotatebox", "dosetattribute", "dosetleftskipadaption", "dosetrightskipadaption", "dosetupcheckedinterlinespace", "doseventupleargument", "doseventupleempty", "dosingleargument", "dosingleempty", "dosinglegroupempty", "dosixtupleargument", "dosixtupleempty", "dosomebreak", "dostepwiserecurse", "dosubtractfeature", "dot", "doteq", "doteqdot", "dotfill", "dotfskip", "dotlessI", "dotlessJ", "dotlessi", "dotlessj", "dotlessjstroke", "dotminus", "dotoks", "dotplus", "dotripleargument", "dotripleargumentwithset", "dotripleempty", "dotripleemptywithset", "dotriplegroupempty", "dots", "dottedcircle", "dottedrightarrow", "doublebar", "doublebond", "doublebrace", "doublebracket", "doublecap", "doublecup", "doubleparent", "doubleprime", "doubleverticalbar", "dowith", "dowithnextbox", "dowithnextboxcontent", "dowithnextboxcontentcs", "dowithnextboxcs", "dowithpargument", "dowithrange", "dowithwargument", "downarrow", "downdasharrow", "downdownarrows", "downharpoonleft", "downharpoonright", "downuparrows", "downwhitearrow", "downzigzagarrow", "dpofstring", "dstroke", "dtail", "dummydigit", "dummyparameter", "dzcaronligature", "dzligature", "eTeX", "eacute", "ebreve", "ecaron", "ecedilla", "ecircumflex", "ecircumflexacute", "ecircumflexdotbelow", "ecircumflexgrave", "ecircumflexhook", "ecircumflextilde", "edefconvertedargument", "ediaeresis", "edotaccent", "edotbelow", "edoublegrave", "efcmaxheight", "efcmaxwidth", "efcminheight", "efcminwidth", 
"efcparameter", "effect", "egrave", "ehook", "einvertedbreve", "elapsedseconds", "elapsedsteptime", "elapsedtime", "eleftarrowfill", "eleftharpoondownfill", "eleftharpoonupfill", "eleftrightarrowfill", "ell", "em", "emacron", "emdash", "emphasisboldface", "emphasistypeface", "emptylines", "emptyset", "emquad", "emspace", "enableasciimode", "enabledirectives", "enableexperiments", "enablemode", "enableoutputstream", "enableparpositions", "enableregime", "enabletrackers", "endash", "endnote", "enquad", "enskip", "enspace", "env", "environment", "envvar", "eogonek", "eoverbarfill", "eoverbracefill", "eoverbracketfill", "eoverparentfill", "epos", "epsilon", "eq", "eqcirc", "eqeq", "eqeqeq", "eqgtr", "eqless", "eqsim", "eqslantgtr", "eqslantless", "equaldigits", "equalscolon", "equiv", "erightarrowfill", "erightharpoondownfill", "erightharpoonupfill", "eta", "eth", "ethiopic", "etilde", "etwoheadrightarrowfill", "eunderbarfill", "eunderbracefill", "eunderbracketfill", "eunderparentfill", "exclamdown", "executeifdefined", "exists", "exitloop", "exitloopnow", "expandcheckedcsname", "expanded", "expandeddoif", "expandeddoifelse", "expandeddoifnot", "expandfontsynonym", "expdoif", "expdoifcommonelse", "expdoifelse", "expdoifelsecommon", "expdoifelseinset", "expdoifinsetelse", "expdoifnot", "exponentiale", "externalfigure", "externalfigurecollectionmaxheight", "externalfigurecollectionmaxwidth", "externalfigurecollectionminheight", "externalfigurecollectionminwidth", "externalfigurecollectionparameter", "fakebox", "fallingdotseq", "fastdecrement", "fastincrement", "fastlocalframed", "fastloopfinal", "fastloopindex", "fastscale", "fastsetup", "fastsetupwithargument", "fastsetupwithargumentswapped", "fastswitchtobodyfont", "fastsxsy", "feature", "fence", "fenced", "fetchallmarkings", "fetchallmarks", "fetchmark", "fetchmarking", "fetchonemark", "fetchonemarking", "fetchruntinecommand", "fetchtwomarkings", "fetchtwomarks", "ffiligature", "ffligature", "fflligature", "fhook", 
"field", "fieldbody", "fieldstack", "fifthoffivearguments", "fifthofsixarguments", "figurefilename", "figurefilepath", "figurefiletype", "figurefullname", "figureheight", "figurenaturalheight", "figurenaturalwidth", "figurespace", "figuresymbol", "figurewidth", "filename", "filigature", "filledhboxb", "filledhboxc", "filledhboxg", "filledhboxk", "filledhboxm", "filledhboxr", "filledhboxy", "filler", "fillinline", "fillinrules", "fillintext", "fillupto", "filterfromnext", "filterfromvalue", "filterpages", "filterreference", "findtwopassdata", "finishregisterentry", "firstcharacter", "firstcounter", "firstcountervalue", "firstinlist", "firstoffivearguments", "firstoffourarguments", "firstofoneargument", "firstofoneunexpanded", "firstofsixarguments", "firstofthreearguments", "firstofthreeunexpanded", "firstoftwoarguments", "firstoftwounexpanded", "firstrealpage", "firstrealpagenumber", "firstsubcountervalue", "firstsubpage", "firstsubpagenumber", "firstuserpage", "firstuserpagenumber", "fitfield", "fitfieldframed", "fittopbaselinegrid", "fiveeighths", "fivesixths", "fixedspace", "fixedspaces", "flag", "flat", "flligature", "floatuserdataparameter", "flushbox", "flushboxregister", "flushcollector", "flushedrightlastline", "flushlayer", "flushlocalfloats", "flushnextbox", "flushnotes", "flushoutputstream", "flushshapebox", "flushtextflow", "flushtokens", "flushtoks", "fontalternative", "fontbody", "fontchar", "fontcharbyindex", "fontclass", "fontclassname", "fontface", "fontfeaturelist", "fontsize", "fontstyle", "footnote", "footnotetext", "forall", "forcecharacterstripping", "forcelocalfloats", "forgeteverypar", "forgetparagraphfreezing", "forgetparameters", "forgetparskip", "forgetparwrapper", "forgetragged", "formula", "formulanumber", "foundbox", "fourfifths", "fourperemspace", "fourthoffivearguments", "fourthoffourarguments", "fourthofsixarguments", "frac", "framed", "frameddimension", "framedparameter", "framedtext", "freezedimenmacro", "freezemeasure", 
"freezeparagraphproperties", "frenchspacing", "from", "fromlinenote", "frown", "frozenhbox", "frule", "gacute", "gamma", "gbreve", "gcaron", "gcircumflex", "gcommaaccent", "gdefconvertedargument", "gdefconvertedcommand", "gdotaccent", "ge", "geq", "geqq", "geqslant", "getMPdrawing", "getMPlayer", "getboxfromcache", "getboxllx", "getboxlly", "getbuffer", "getbufferdata", "getcommacommandsize", "getcommalistsize", "getdatavalue", "getdayoftheweek", "getdayspermonth", "getdefinedbuffer", "getdocumentargument", "getdocumentargumentdefault", "getdocumentfilename", "getdummyparameters", "getemptyparameters", "geteparameters", "getexpandedparameters", "getfiguredimensions", "getfirstcharacter", "getfirsttwopassdata", "getfromcommacommand", "getfromcommalist", "getfromluatable", "getfromtwopassdata", "getglyphdirect", "getglyphstyled", "getgparameters", "getinlineuserdata", "getlasttwopassdata", "getlocalfloat", "getlocalfloats", "getmarking", "getmessage", "getnamedglyphdirect", "getnamedglyphstyled", "getnamedtwopassdatalist", "getnaturaldimensions", "getnoflines", "getobject", "getobjectdimensions", "getpaletsize", "getparameters", "getparwrapper", "getprivatechar", "getprivateslot", "getrandomcount", "getrandomdimen", "getrandomfloat", "getrandomnumber", "getrandomseed", "getraweparameters", "getrawgparameters", "getrawnoflines", "getrawparameters", "getrawxparameters", "getreference", "getreferenceentry", "getroundednoflines", "gets", "getsubstring", "gettokenlist", "gettwopassdata", "gettwopassdatalist", "getuserdata", "getuvalue", "getvalue", "getvariable", "getvariabledefault", "getxparameters", "gg", "ggg", "gggtr", "gimel", "globaldisablemode", "globalenablemode", "globalletempty", "globalpopbox", "globalpopmacro", "globalpreventmode", "globalprocesscommalist", "globalpushbox", "globalpushmacro", "globalswapcounts", "globalswapdimens", "globalswapmacros", "globalundefine", "glyphfontfile", "gnapprox", "gneqq", "gnsim", "gobbledoubleempty", "gobbleeightarguments", 
"gobblefivearguments", "gobblefiveoptionals", "gobblefourarguments", "gobblefouroptionals", "gobbleninearguments", "gobbleoneargument", "gobbleoneoptional", "gobblesevenarguments", "gobblesingleempty", "gobblesixarguments", "gobblespacetokens", "gobbletenarguments", "gobblethreearguments", "gobblethreeoptionals", "gobbletwoarguments", "gobbletwooptionals", "gobbleuntil", "gobbleuntilrelax", "godown", "goto", "gotobox", "gotopage", "grabbufferdata", "grabbufferdatadirect", "grabuntil", "grave", "graycolor", "grayvalue", "greedysplitstring", "greekAlpha", "greekAlphadasia", "greekAlphadasiaperispomeni", "greekAlphadasiatonos", "greekAlphadasiavaria", "greekAlphaiotasub", "greekAlphaiotasubdasia", "greekAlphaiotasubdasiaperispomeni", "greekAlphaiotasubdasiatonos", "greekAlphaiotasubdasiavaria", "greekAlphaiotasubpsili", "greekAlphaiotasubpsiliperispomeni", "greekAlphaiotasubpsilitonos", "greekAlphaiotasubpsilivaria", "greekAlphamacron", "greekAlphapsili", "greekAlphapsiliperispomeni", "greekAlphapsilitonos", "greekAlphapsilivaria", "greekAlphatonos", "greekAlphavaria", "greekAlphavrachy", "greekBeta", "greekChi", "greekCoronis", "greekDelta", "greekEpsilon", "greekEpsilondasia", "greekEpsilondasiatonos", "greekEpsilondasiavaria", "greekEpsilonpsili", "greekEpsilonpsilitonos", "greekEpsilonpsilivaria", "greekEpsilontonos", "greekEpsilonvaria", "greekEta", "greekEtadasia", "greekEtadasiaperispomeni", "greekEtadasiatonos", "greekEtadasiavaria", "greekEtaiotasub", "greekEtaiotasubdasia", "greekEtaiotasubdasiaperispomeni", "greekEtaiotasubdasiatonos", "greekEtaiotasubdasiavaria", "greekEtaiotasubpsili", "greekEtaiotasubpsiliperispomeni", "greekEtaiotasubpsilitonos", "greekEtaiotasubpsilivaria", "greekEtapsili", "greekEtapsiliperispomeni", "greekEtapsilitonos", "greekEtapsilivaria", "greekEtatonos", "greekEtavaria", "greekGamma", "greekIota", "greekIotadasia", "greekIotadasiaperispomeni", "greekIotadasiatonos", "greekIotadasiavaria", "greekIotadialytika", "greekIotamacron", 
"greekIotapsili", "greekIotapsiliperispomeni", "greekIotapsilitonos", "greekIotapsilivaria", "greekIotatonos", "greekIotavaria", "greekIotavrachy", "greekKappa", "greekLambda", "greekMu", "greekNu", "greekOmega", "greekOmegadasia", "greekOmegadasiaperispomeni", "greekOmegadasiatonos", "greekOmegadasiavaria", "greekOmegaiotasub", "greekOmegaiotasubdasia", "greekOmegaiotasubdasiaperispomeni", "greekOmegaiotasubdasiatonos", "greekOmegaiotasubdasiavaria", "greekOmegaiotasubpsili", "greekOmegaiotasubpsiliperispomeni", "greekOmegaiotasubpsilitonos", "greekOmegaiotasubpsilivaria", "greekOmegapsili", "greekOmegapsiliperispomeni", "greekOmegapsilitonos", "greekOmegapsilivaria", "greekOmegatonos", "greekOmegavaria", "greekOmicron", "greekOmicrondasia", "greekOmicrondasiatonos", "greekOmicrondasiavaria", "greekOmicronpsili", "greekOmicronpsilitonos", "greekOmicronpsilivaria", "greekOmicrontonos", "greekOmicronvaria", "greekPhi", "greekPi", "greekPsi", "greekRho", "greekRhodasia", "greekSigma", "greekSigmalunate", "greekTau", "greekTheta", "greekUpsilon", "greekUpsilondasia", "greekUpsilondasiaperispomeni", "greekUpsilondasiatonos", "greekUpsilondasiavaria", "greekUpsilondialytika", "greekUpsilonmacron", "greekUpsilontonos", "greekUpsilonvaria", "greekUpsilonvrachy", "greekXi", "greekZeta", "greekalpha", "greekalphadasia", "greekalphadasiaperispomeni", "greekalphadasiatonos", "greekalphadasiavaria", "greekalphaiotasub", "greekalphaiotasubdasia", "greekalphaiotasubdasiaperispomeni", "greekalphaiotasubdasiatonos", "greekalphaiotasubdasiavaria", "greekalphaiotasubperispomeni", "greekalphaiotasubpsili", "greekalphaiotasubpsiliperispomeni", "greekalphaiotasubpsilitonos", "greekalphaiotasubpsilivaria", "greekalphaiotasubtonos", "greekalphaiotasubvaria", "greekalphamacron", "greekalphaoxia", "greekalphaperispomeni", "greekalphapsili", "greekalphapsiliperispomeni", "greekalphapsilitonos", "greekalphapsilivaria", "greekalphatonos", "greekalphavaria", "greekalphavrachy", "greekbeta", 
"greekbetaalt", "greekchi", "greekdasia", "greekdasiaperispomeni", "greekdasiavaria", "greekdelta", "greekdialytikaperispomeni", "greekdialytikatonos", "greekdialytikavaria", "greekdigamma", "greekepsilon", "greekepsilonalt", "greekepsilondasia", "greekepsilondasiatonos", "greekepsilondasiavaria", "greekepsilonoxia", "greekepsilonpsili", "greekepsilonpsilitonos", "greekepsilonpsilivaria", "greekepsilontonos", "greekepsilonvaria", "greeketa", "greeketadasia", "greeketadasiaperispomeni", "greeketadasiatonos", "greeketadasiavaria", "greeketaiotasub", "greeketaiotasubdasia", "greeketaiotasubdasiaperispomeni", "greeketaiotasubdasiatonos", "greeketaiotasubdasiavaria", "greeketaiotasubperispomeni", "greeketaiotasubpsili", "greeketaiotasubpsiliperispomeni", "greeketaiotasubpsilitonos", "greeketaiotasubpsilivaria", "greeketaiotasubtonos", "greeketaiotasubvaria", "greeketaoxia", "greeketaperispomeni", "greeketapsili", "greeketapsiliperispomeni", "greeketapsilitonos", "greeketapsilivaria", "greeketatonos", "greeketavaria", "greekfinalsigma", "greekgamma", "greekiota", "greekiotadasia", "greekiotadasiaperispomeni", "greekiotadasiatonos", "greekiotadasiavaria", "greekiotadialytika", "greekiotadialytikaperispomeni", "greekiotadialytikatonos", "greekiotadialytikavaria", "greekiotamacron", "greekiotaoxia", "greekiotaperispomeni", "greekiotapsili", "greekiotapsiliperispomeni", "greekiotapsilitonos", "greekiotapsilivaria", "greekiotatonos", "greekiotavaria", "greekiotavrachy", "greekkappa", "greekkoppa", "greeklambda", "greekmu", "greeknu", "greeknumerals", "greeknumkoppa", "greekomega", "greekomegadasia", "greekomegadasiaperispomeni", "greekomegadasiatonos", "greekomegadasiavaria", "greekomegaiotasub", "greekomegaiotasubdasia", "greekomegaiotasubdasiaperispomeni", "greekomegaiotasubdasiatonos", "greekomegaiotasubdasiavaria", "greekomegaiotasubperispomeni", "greekomegaiotasubpsili", "greekomegaiotasubpsiliperispomeni", "greekomegaiotasubpsilitonos", "greekomegaiotasubpsilivaria", 
"greekomegaiotasubtonos", "greekomegaiotasubvaria", "greekomegaoxia", "greekomegaperispomeni", "greekomegapsili", "greekomegapsiliperispomeni", "greekomegapsilitonos", "greekomegapsilivaria", "greekomegatonos", "greekomegavaria", "greekomicron", "greekomicrondasia", "greekomicrondasiatonos", "greekomicrondasiavaria", "greekomicronoxia", "greekomicronpsili", "greekomicronpsilitonos", "greekomicronpsilivaria", "greekomicrontonos", "greekomicronvaria", "greekoxia", "greekperispomeni", "greekphi", "greekphialt", "greekpi", "greekpialt", "greekprosgegrammeni", "greekpsi", "greekpsili", "greekpsiliperispomeni", "greekpsilivaria", "greekrho", "greekrhoalt", "greekrhodasia", "greekrhopsili", "greeksampi", "greeksigma", "greeksigmalunate", "greekstigma", "greektau", "greektheta", "greekthetaalt", "greektonos", "greekupsilon", "greekupsilondasia", "greekupsilondasiaperispomeni", "greekupsilondasiatonos", "greekupsilondasiavaria", "greekupsilondiaeresis", "greekupsilondialytikaperispomeni", "greekupsilondialytikatonos", "greekupsilondialytikavaria", "greekupsilonmacron", "greekupsilonoxia", "greekupsilonperispomeni", "greekupsilonpsili", "greekupsilonpsiliperispomeni", "greekupsilonpsilitonos", "greekupsilonpsilivaria", "greekupsilontonos", "greekupsilonvaria", "greekupsilonvrachy", "greekvaria", "greekxi", "greekzeta", "grid", "groupedcommand", "gsetboxllx", "gsetboxlly", "gstroke", "gt", "gtrapprox", "gtrdot", "gtreqless", "gtreqqless", "gtrless", "gtrsim", "guilsingleleft", "guilsingleright", "gujaratinumerals", "gurmurkhinumerals", "hairline", "hairspace", "halflinestrut", "halfstrut", "halfwaybox", "handletokens", "handwritten", "hangul", "hanzi", "hash", "hat", "hbar", "hboxofvbox", "hboxreference", "hcaron", "hcircumflex", "hdofstring", "headhbox", "headlanguage", "headnumber", "headnumbercontent", "headnumberdistance", "headnumberwidth", "headreferenceattributes", "headsetupspacing", "headtext", "headtextcontent", "headtextdistance", "headtexts", "headtextwidth", 
"headvbox", "headwidth", "heartsuit", "hebrewAlef", "hebrewAyin", "hebrewBet", "hebrewDalet", "hebrewGimel", "hebrewHe", "hebrewHet", "hebrewKaf", "hebrewKaffinal", "hebrewLamed", "hebrewMem", "hebrewMemfinal", "hebrewNun", "hebrewNunfinal", "hebrewPe", "hebrewPefinal", "hebrewQof", "hebrewResh", "hebrewSamekh", "hebrewShin", "hebrewTav", "hebrewTet", "hebrewTsadi", "hebrewTsadifinal", "hebrewVav", "hebrewYod", "hebrewZayin", "hebrewnumerals", "heightanddepthofstring", "heightofstring", "heightspanningtext", "helptext", "hexnumber", "hexstringtonumber", "hglue", "hiddenbar", "hiddencitation", "hiddencite", "hideblocks", "high", "highlight", "highordinalstr", "hilo", "himilo", "hl", "hookleftarrow", "hookrightarrow", "horizontalgrowingbar", "horizontalpositionbar", "hpackbox", "hpackedbox", "hphantom", "hpos", "hsizefraction", "hslash", "hsmash", "hsmashbox", "hsmashed", "hspace", "hstroke", "htdpofstring", "htofstring", "hyphen", "hyphenatedcoloredword", "hyphenatedfile", "hyphenatedfilename", "hyphenatedhbox", "hyphenatedpar", "hyphenatedurl", "hyphenatedword", "iacute", "ibox", "ibreve", "icaron", "icircumflex", "ideographichalffillspace", "ideographicspace", "idiaeresis", "idotaccent", "idotbelow", "idoublegrave", "idxfromluatable", "ifassignment", "iff", "ifinobject", "ifinoutputstream", "ifparameters", "iftrialtypesetting", "ignoreimplicitspaces", "ignoretagsinexport", "ignorevalue", "igrave", "ihook", "iiiint", "iiiintop", "iiint", "iiintop", "iint", "iintop", "iinvertedbreve", "ijligature", "imacron", "imaginaryi", "imaginaryj", "imath", "immediatesavetwopassdata", "impliedby", "implies", "imply", "in", "includemenu", "includesvgbuffer", "includesvgfile", "includeversioninfo", "increment", "incrementcounter", "incrementedcounter", "incrementpagenumber", "incrementsubpagenumber", "incrementvalue", "indentation", "index", "infofont", "infofontbold", "inframed", "infty", "infull", "inheritparameter", "inhibitblank", "ininner", "ininneredge", "ininnermargin", 
"initializeboxstack", "inleft", "inleftedge", "inleftmargin", "inline", "inlinebuffer", "inlinedbox", "inlinemath", "inlinemathematics", "inlinemessage", "inlineordisplaymath", "inlineprettyprintbuffer", "inlinerange", "inmargin", "inmframed", "innerflushshapebox", "inother", "inouter", "inouteredge", "inoutermargin", "input", "inputfilebarename", "inputfilename", "inputfilerealsuffix", "inputfilesuffix", "inputgivenfile", "inright", "inrightedge", "inrightmargin", "insertpages", "inspectluatable", "installactionhandler", "installactivecharacter", "installanddefineactivecharacter", "installattributestack", "installautocommandhandler", "installautosetuphandler", "installbasicautosetuphandler", "installbasicparameterhandler", "installbottomframerenderer", "installcommandhandler", "installcorenamespace", "installctxfunction", "installctxscanner", "installdefinehandler", "installdefinitionset", "installdefinitionsetmember", "installdirectcommandhandler", "installdirectparameterhandler", "installdirectparametersethandler", "installdirectsetuphandler", "installdirectstyleandcolorhandler", "installframedautocommandhandler", "installframedcommandhandler", "installglobalmacrostack", "installlanguage", "installleftframerenderer", "installmacrostack", "installnamespace", "installoutputroutine", "installpagearrangement", "installparameterhandler", "installparameterhashhandler", "installparametersethandler", "installparentinjector", "installprotectedctxfunction", "installprotectedctxscanner", "installrightframerenderer", "installrootparameterhandler", "installsetuphandler", "installsetuponlycommandhandler", "installshipoutmethod", "installsimplecommandhandler", "installsimpleframedcommandhandler", "installstyleandcolorhandler", "installswitchcommandhandler", "installswitchsetuphandler", "installtexdirective", "installtextracker", "installtopframerenderer", "installunitsseparator", "installunitsspace", "installversioninfo", "int", "intclockwise", "integerrounding", "integers", 
"interactionbar", "interactionbuttons", "interactionmenu", "intercal", "intertext", "interwordspaceafter", "interwordspacebefore", "interwordspaces", "interwordspacesafter", "interwordspacesbefore", "intop", "invisibletimes", "invokepagehandler", "iogonek", "iota", "italic", "italicbold", "italiccorrection", "italicface", "item", "items", "itemtag", "itilde", "jcaron", "jcircumflex", "jmath", "jobfilename", "jobfilesuffix", "kap", "kappa", "kcaron", "kcommaaccent", "keepblocks", "keeplinestogether", "keepunwantedspaces", "kerncharacters", "khook", "kkra", "koreancirclenumerals", "koreannumerals", "koreannumeralsc", "koreannumeralsp", "koreanparentnumerals", "lVert", "labellanguage", "labeltext", "labeltexts", "lacute", "lambda", "lambdabar", "land", "langle", "language", "languageCharacters", "languagecharacters", "languagecharwidth", "lastcounter", "lastcountervalue", "lastdigit", "lastlinewidth", "lastnaturalboxdp", "lastnaturalboxht", "lastnaturalboxwd", "lastparwrapper", "lastpredefinedsymbol", "lastrealpage", "lastrealpagenumber", "lastsubcountervalue", "lastsubpage", "lastsubpagenumber", "lasttwodigits", "lastuserpage", "lastuserpagenumber", "lateluacode", "latin", "layeredtext", "layerheight", "layerwidth", "lazysavetaggedtwopassdata", "lazysavetwopassdata", "lbar", "lbox", "lbrace", "lbracket", "lcaron", "lceil", "lchexnumber", "lchexnumbers", "lcommaaccent", "lcurl", "ldotmiddle", "ldotp", "ldots", "le", "leadsto", "left", "leftaligned", "leftarrow", "leftarrowtail", "leftarrowtriangle", "leftbottombox", "leftbox", "leftdasharrow", "leftguillemot", "leftharpoondown", "leftharpoonup", "lefthbox", "leftheadtext", "leftlabeltext", "leftleftarrows", "leftline", "leftmathlabeltext", "leftorrighthbox", "leftorrightvbox", "leftorrightvtop", "leftrightarrow", "leftrightarrows", "leftrightarrowtriangle", "leftrightharpoons", "leftrightsquigarrow", "leftskipadaption", "leftsquigarrow", "leftsubguillemot", "leftthreetimes", "lefttopbox", "lefttoright", 
"lefttorighthbox", "lefttorightvbox", "lefttorightvtop", "leftwavearrow", "leftwhitearrow", "leq", "leqq", "leqslant", "lessapprox", "lessdot", "lesseqgtr", "lesseqqgtr", "lessgtr", "lesssim", "letbeundefined", "letcatcodecommand", "letcscsname", "letcsnamecs", "letcsnamecsname", "letdummyparameter", "letempty", "letgvalue", "letgvalueempty", "letgvalurelax", "letterampersand", "letterat", "letterbackslash", "letterbar", "letterbgroup", "letterclosebrace", "lettercolon", "letterdollar", "letterdoublequote", "letteregroup", "letterescape", "letterexclamationmark", "letterhash", "letterhat", "letterleftbrace", "letterleftbracket", "letterleftparenthesis", "letterless", "lettermore", "letteropenbrace", "letterpercent", "letterquestionmark", "letterrightbrace", "letterrightbracket", "letterrightparenthesis", "lettersinglequote", "letterslash", "letterspacing", "lettertilde", "letterunderscore", "letvalue", "letvalueempty", "letvaluerelax", "lfence", "lfloor", "lgroup", "lhbox", "lhooknwarrow", "lhooksearrow", "limitatefirstline", "limitatelines", "limitatetext", "line", "linebox", "linefeed", "linefillerhbox", "linefillervbox", "linefillervtop", "linenote", "linespanningtext", "linethickness", "linterval", "listcitation", "listcite", "listlength", "listnamespaces", "literalmode", "ljligature", "ll", "llangle", "llap", "llbracket", "llcorner", "lll", "llless", "llointerval", "lmoustache", "lnapprox", "lneq", "lneqq", "lnot", "lnsim", "loadanyfile", "loadanyfileonce", "loadbtxdefinitionfile", "loadbtxreplacementfile", "loadcldfile", "loadcldfileonce", "loadfontgoodies", "loadluafile", "loadluafileonce", "loadspellchecklist", "loadtexfile", "loadtexfileonce", "loadtypescriptfile", "localframed", "localframedwithsettings", "localhsize", "localpopbox", "localpopmacro", "localpushbox", "localpushmacro", "localundefine", "locatedfilepath", "locatefilepath", "locfilename", "logo", "lohi", "lointerval", "lomihi", "longleftarrow", "longleftrightarrow", "longmapsfrom", 
"longmapsto", "longrightarrow", "longrightsquigarrow", "looparrowleft", "looparrowright", "lor", "low", "lowerbox", "lowercased", "lowercasestring", "lowercasing", "lowerleftdoubleninequote", "lowerleftsingleninequote", "lowerrightdoubleninequote", "lowerrightsingleninequote", "lozenge", "lparent", "lrcorner", "lrointerval", "lrtbbox", "lstroke", "lt", "ltimes", "ltop", "ltrhbox", "ltrvbox", "ltrvtop", "luaTeX", "luacode", "luaconditional", "luaenvironment", "luaexpanded", "luaexpr", "luafunction", "luajitTeX", "luamajorversion", "luametaTeX", "luaminorversion", "luaparameterset", "luasetup", "luaversion", "lvert", "m", "mLeftarrow", "mLeftrightarrow", "mRightarrow", "mainlanguage", "makecharacteractive", "makerawcommalist", "makestrutofbox", "maltese", "mapfontsize", "mapsdown", "mapsfrom", "mapsto", "mapsup", "margindata", "margintext", "markcontent", "markedpages", "marking", "markinjector", "markpage", "markreferencepage", "mat", "math", "mathampersand", "mathbf", "mathbi", "mathblackboard", "mathbs", "mathdefault", "mathdollar", "mathdouble", "mathematics", "mathfraktur", "mathfunction", "mathhash", "mathhyphen", "mathit", "mathitalic", "mathlabellanguage", "mathlabeltext", "mathlabeltexts", "mathop", "mathover", "mathpercent", "mathrm", "mathscript", "mathsl", "mathss", "mathtext", "mathtextbf", "mathtextbi", "mathtextbs", "mathtextit", "mathtextsl", "mathtexttf", "mathtf", "mathtriplet", "mathtt", "mathunder", "mathupright", "mathword", "mathwordbf", "mathwordbi", "mathwordbs", "mathwordit", "mathwordsl", "mathwordtf", "maxaligned", "mbox", "mcframed", "measure", "measured", "measuredangle", "measuredeq", "medskip", "medspace", "menubutton", "mequal", "message", "mfence", "mframed", "mfunction", "mfunctionlabeltext", "mhbox", "mho", "mhookleftarrow", "mhookrightarrow", "mid", "midaligned", "middle", "middlealigned", "middlebox", "midhbox", "midsubsentence", "minimalhbox", "minus", "minuscolon", "mirror", "mixedcaps", "mkvibuffer", "mleftarrow", 
"mleftharpoondown", "mleftharpoonup", "mleftrightarrow", "mleftrightharpoons", "mmapsto", "models", "moduleparameter", "molecule", "mono", "monobold", "mononormal", "month", "monthlong", "monthshort", "mp", "mprandomnumber", "mrel", "mrightarrow", "mrightharpoondown", "mrightharpoonup", "mrightleftharpoons", "mrightoverleftarrow", "mtext", "mtriplerel", "mtwoheadleftarrow", "mtwoheadrightarrow", "mu", "multimap", "nHdownarrow", "nHuparrow", "nLeftarrow", "nLeftrightarrow", "nRightarrow", "nVDash", "nVdash", "nVleftarrow", "nVleftrightarrow", "nVrightarrow", "nabla", "nacute", "namedheadnumber", "namedstructureheadlocation", "namedstructureuservariable", "namedstructurevariable", "namedtaggedlabeltexts", "napostrophe", "napprox", "napproxEq", "narrownobreakspace", "nasymp", "natural", "naturalhbox", "naturalhpack", "naturalnumbers", "naturaltpack", "naturalvbox", "naturalvcenter", "naturalvpack", "naturalvtop", "naturalwd", "ncaron", "ncommaaccent", "ncong", "ncurl", "ndivides", "ne", "nearrow", "neg", "negatecolorbox", "negated", "negativesign", "negemspace", "negenspace", "negthinspace", "neng", "neq", "nequiv", "neswarrow", "newattribute", "newcatcodetable", "newcounter", "newevery", "newfrenchspacing", "newluatable", "newmode", "newsignal", "newsystemmode", "nexists", "nextbox", "nextboxdp", "nextboxht", "nextboxhtdp", "nextboxwd", "nextcounter", "nextcountervalue", "nextdepth", "nextparagraphs", "nextrealpage", "nextrealpagenumber", "nextsubcountervalue", "nextsubpage", "nextsubpagenumber", "nextuserpage", "nextuserpagenumber", "ngeq", "ngrave", "ngtr", "ngtrless", "ngtrsim", "ni", "nihongo", "nin", "njligature", "nleftarrow", "nleftrightarrow", "nleq", "nless", "nlessgtr", "nlesssim", "nmid", "nni", "nobar", "nobreakspace", "nocap", "nocharacteralign", "nocitation", "nocite", "nodetostring", "noffigurepages", "noflines", "noflinesinbox", "noflocalfloats", "noheaderandfooterlines", "noheightstrut", "nohyphens", "noindentation", "noitem", "nonfrenchspacing", 
"nonmathematics", "nonvalidassignment", "normal", "normalboldface", "normalframedwithsettings", "normalitalicface", "normalizebodyfontsize", "normalizedfontsize", "normalizefontdepth", "normalizefontheight", "normalizefontline", "normalizefontwidth", "normalizetextdepth", "normalizetextheight", "normalizetextline", "normalizetextwidth", "normalslantedface", "normaltypeface", "nospace", "not", "note", "notesymbol", "notin", "notopandbottomlines", "notragged", "nowns", "nparallel", "nprec", "npreccurlyeq", "nrightarrow", "nsim", "nsimeq", "nsqsubseteq", "nsqsupseteq", "nsubset", "nsubseteq", "nsucc", "nsucccurlyeq", "nsupset", "nsupseteq", "ntilde", "ntimes", "ntriangleleft", "ntrianglelefteq", "ntriangleright", "ntrianglerighteq", "nu", "numberofpoints", "numbers", "nvDash", "nvdash", "nvleftarrow", "nvleftrightarrow", "nvrightarrow", "nwarrow", "nwsearrow", "oacute", "obeydepth", "objectdepth", "objectheight", "objectmargin", "objectwidth", "obox", "obreve", "ocaron", "ocircumflex", "ocircumflexacute", "ocircumflexdotbelow", "ocircumflexgrave", "ocircumflexhook", "ocircumflextilde", "octnumber", "octstringtonumber", "odiaeresis", "odiaeresismacron", "odot", "odotaccent", "odotaccentmacron", "odotbelow", "odoublegrave", "oeligature", "offset", "offsetbox", "ograve", "ohm", "ohook", "ohorn", "ohornacute", "ohorndotbelow", "ohorngrave", "ohornhook", "ohorntilde", "ohungarumlaut", "oiiint", "oiint", "oint", "ointclockwise", "ointctrclockwise", "oinvertedbreve", "omacron", "omega", "omicron", "ominus", "onedigitrounding", "oneeighth", "onefifth", "onehalf", "onequarter", "onesixth", "onesuperior", "onethird", "oogonek", "oogonekmacron", "operatorlanguage", "operatortext", "oplus", "ordfeminine", "ordinaldaynumber", "ordinalstr", "ordmasculine", "ornamenttext", "oslash", "ostroke", "ostrokeacute", "otilde", "otildemacron", "otimes", "outputfilename", "outputstreambox", "outputstreamcopy", "outputstreamunvbox", "outputstreamunvcopy", "over", "overbar", "overbars", 
"overbartext", "overbarunderbar", "overbrace", "overbracetext", "overbraceunderbrace", "overbracket", "overbrackettext", "overbracketunderbracket", "overlaybutton", "overlaycolor", "overlaydepth", "overlayfigure", "overlayheight", "overlayimage", "overlaylinecolor", "overlaylinewidth", "overlayoffset", "overlayrollbutton", "overlaywidth", "overleftarrow", "overleftharpoondown", "overleftharpoonup", "overleftrightarrow", "overloaderror", "overparent", "overparenttext", "overparentunderparent", "overrightarrow", "overrightharpoondown", "overrightharpoonup", "overset", "overstrike", "overstrikes", "overtwoheadleftarrow", "overtwoheadrightarrow", "owns", "page", "pagearea", "pagebreak", "pagefigure", "pageinjection", "pagenumber", "pagereference", "pagestaterealpage", "pagestaterealpageorder", "paletsize", "paragraphmark", "parallel", "part", "partial", "pdfTeX", "pdfactualtext", "pdfbackendactualtext", "pdfbackendcurrentresources", "pdfbackendsetcatalog", "pdfbackendsetcolorspace", "pdfbackendsetextgstate", "pdfbackendsetinfo", "pdfbackendsetname", "pdfbackendsetpageattribute", "pdfbackendsetpageresource", "pdfbackendsetpagesattribute", "pdfbackendsetpattern", "pdfbackendsetshade", "pdfcolor", "pdfeTeX", "percent", "percentdimen", "periodcentered", "periods", "permitcaretescape", "permitcircumflexescape", "permitspacesbetweengroups", "perp", "persiandecimals", "persiandecimalseparator", "persiannumerals", "persianthousandsseparator", "perthousand", "phantom", "phantombox", "phi", "phook", "pi", "pickupgroupedcommand", "pitchfork", "placeattachments", "placebookmarks", "placebtxrendering", "placechemical", "placecitation", "placecombinedlist", "placecomments", "placecontent", "placecurrentformulanumber", "placedbox", "placefigure", "placefloat", "placefloatcaption", "placefloatwithsetups", "placefootnotes", "placeformula", "placeframed", "placegraphic", "placeheadnumber", "placeheadtext", "placehelp", "placeholder", "placeindex", "placeinitial", "placeintermezzo", 
"placelayer", "placelayeredtext", "placelegend", "placelist", "placelistofabbreviations", "placelistofchemicals", "placelistoffigures", "placelistofgraphics", "placelistofintermezzi", "placelistoflogos", "placelistofpublications", "placelistofsorts", "placelistofsynonyms", "placelistoftables", "placelocalfootnotes", "placelocalnotes", "placement", "placenamedfloat", "placenamedformula", "placenotes", "placeongrid", "placeontopofeachother", "placepagenumber", "placepairedbox", "placeparallel", "placerawheaddata", "placerawheadnumber", "placerawheadtext", "placerawlist", "placeregister", "placerenderingwindow", "placesidebyside", "placesubformula", "placetable", "pm", "popattribute", "popmacro", "popmode", "popsystemmode", "position", "positionoverlay", "positionregionoverlay", "positivesign", "postponenotes", "prec", "precapprox", "preccurlyeq", "preceq", "preceqq", "precnapprox", "precneq", "precneqq", "precnsim", "precsim", "predefinedfont", "predefinefont", "predefinesymbol", "prefixedpagenumber", "prefixlanguage", "prefixtext", "prependetoks", "prependgvalue", "prependtocommalist", "prependtoks", "prependtoksonce", "prependvalue", "prerollblank", "presetbtxlabeltext", "presetdocument", "presetfieldsymbols", "presetheadtext", "presetlabeltext", "presetmathlabeltext", "presetoperatortext", "presetprefixtext", "presetsuffixtext", "presettaglabeltext", "presetunittext", "pretocommalist", "prettyprintbuffer", "prevcounter", "prevcountervalue", "preventmode", "prevrealpage", "prevrealpagenumber", "prevsubcountervalue", "prevsubpage", "prevsubpagenumber", "prevuserpage", "prevuserpagenumber", "prime", "primes", "procent", "processMPbuffer", "processMPfigurefile", "processaction", "processallactionsinset", "processassignlist", "processassignmentcommand", "processassignmentlist", "processbetween", "processblocks", "processbodyfontenvironmentlist", "processcolorcomponents", "processcommacommand", "processcommalist", "processcommalistwithparameters", "processcontent", 
"processfile", "processfilemany", "processfilenone", "processfileonce", "processfirstactioninset", "processisolatedchars", "processisolatedwords", "processlinetablebuffer", "processlinetablefile", "processlist", "processmonth", "processranges", "processseparatedlist", "processtexbuffer", "processtokens", "processuntil", "processxtablebuffer", "processyear", "prod", "product", "profiledbox", "profilegivenbox", "program", "project", "propto", "pseudoMixedCapped", "pseudoSmallCapped", "pseudoSmallcapped", "pseudosmallcapped", "psi", "punctuationspace", "purenumber", "pushattribute", "pushbutton", "pushmacro", "pushmode", "pushoutputstream", "pushsystemmode", "putboxincache", "putnextboxincache", "qquad", "quad", "quadrupleprime", "quads", "quarterstrut", "questiondown", "questionedeq", "quitcommalist", "quitprevcommalist", "quittypescriptscanning", "quotation", "quote", "quotedbl", "quotedblbase", "quotedblleft", "quotedblright", "quoteleft", "quoteright", "quotesingle", "quotesinglebase", "rVert", "racute", "raggedbottom", "raggedcenter", "raggedleft", "raggedright", "raggedwidecenter", "raisebox", "randomizetext", "randomnumber", "rangle", "rationals", "rawcounter", "rawcountervalue", "rawdate", "rawdoifelseinset", "rawdoifinset", "rawdoifinsetelse", "rawgetparameters", "rawprocessaction", "rawprocesscommacommand", "rawprocesscommalist", "rawstructurelistuservariable", "rawsubcountervalue", "rbox", "rbrace", "rbracket", "rcaron", "rceil", "rcommaaccent", "rdoublegrave", "readfile", "readfixfile", "readjobfile", "readlocfile", "readsetfile", "readsysfile", "readtexfile", "readxmlfile", "realSmallCapped", "realSmallcapped", "realpagenumber", "reals", "realsmallcapped", "recursedepth", "recurselevel", "recursestring", "redoconvertfont", "ref", "reference", "referencecolumnnumber", "referencepagedetail", "referencepagestate", "referenceprefix", "referencerealpage", "referencesymbol", "referring", "registerattachment", "registerctxluafile", "registered", 
"registerexternalfigure", "registerfontclass", "registerhyphenationexception", "registerhyphenationpattern", "registermenubuttons", "registerparwrapper", "registerparwrapperreverse", "registersort", "registersynonym", "registerunit", "regular", "relatemarking", "relateparameterhandlers", "relaxvalueifundefined", "relbar", "remainingcharacters", "remark", "removebottomthings", "removedepth", "removefromcommalist", "removelastskip", "removelastspace", "removemarkedcontent", "removepunctuation", "removesubstring", "removetoks", "removeunwantedspaces", "repeathead", "replacefeature", "replaceincommalist", "replaceword", "rescan", "rescanwithsetup", "resetMPdrawing", "resetMPenvironment", "resetMPinstance", "resetallattributes", "resetandaddfeature", "resetbar", "resetboxesincache", "resetbreakpoints", "resetbuffer", "resetcharacteralign", "resetcharacterkerning", "resetcharacterspacing", "resetcharacterstripping", "resetcollector", "resetcounter", "resetctxscanner", "resetdigitsmanipulation", "resetdirection", "resetfeature", "resetflag", "resetfontcolorsheme", "resetfontfallback", "resetfontsolution", "resethyphenationfeatures", "resetinjector", "resetinteractionmenu", "resetitaliccorrection", "resetlayer", "resetlocalfloats", "resetmarker", "resetmarking", "resetmode", "resetpagenumber", "resetparallel", "resetpath", "resetpenalties", "resetperiodkerning", "resetprofile", "resetrecurselevel", "resetreference", "resetreplacements", "resetscript", "resetsetups", "resetshownsynonyms", "resetsubpagenumber", "resetsymbolset", "resetsystemmode", "resettimer", "resettokenlist", "resettrackers", "resettrialtypesetting", "resetusedsortings", "resetusedsynonyms", "resetuserpagenumber", "resetvalue", "resetvisualizers", "reshapebox", "resolvedglyphdirect", "resolvedglyphstyled", "restartcounter", "restorebox", "restorecatcodes", "restorecounter", "restorecurrentattributes", "restoreendofline", "restoreglobalbodyfont", "restriction", "retestfeature", "reusableMPgraphic", 
"reuseMPgraphic", "reuserandomseed", "reverseddoubleprime", "reversedprime", "reversedtripleprime", "reversehbox", "reversehpack", "reversetpack", "reversevbox", "reversevboxcontent", "reversevpack", "reversevtop", "revivefeature", "rfence", "rfloor", "rgroup", "rhbox", "rho", "rhooknearrow", "rhookswarrow", "right", "rightaligned", "rightangle", "rightarrow", "rightarrowbar", "rightarrowtail", "rightarrowtriangle", "rightbottombox", "rightbox", "rightdasharrow", "rightguillemot", "rightharpoondown", "rightharpoonup", "righthbox", "rightheadtext", "rightlabeltext", "rightleftarrows", "rightleftharpoons", "rightline", "rightmathlabeltext", "rightorleftpageaction", "rightpageorder", "rightrightarrows", "rightskipadaption", "rightsquigarrow", "rightsubguillemot", "rightthreearrows", "rightthreetimes", "righttoleft", "righttolefthbox", "righttoleftvbox", "righttoleftvtop", "righttopbox", "rightwavearrow", "rightwhitearrow", "ring", "rinterval", "rinvertedbreve", "risingdotseq", "rlap", "rlointerval", "rmoustache", "rneq", "robustaddtocommalist", "robustdoifelseinset", "robustdoifinsetelse", "robustpretocommalist", "rointerval", "rollbutton", "roman", "romanC", "romanD", "romanI", "romanII", "romanIII", "romanIV", "romanIX", "romanL", "romanM", "romanV", "romanVI", "romanVII", "romanVIII", "romanX", "romanXI", "romanXII", "romanc", "romand", "romani", "romanii", "romaniii", "romaniv", "romanix", "romanl", "romanm", "romannumerals", "romanv", "romanvi", "romanvii", "romanviii", "romanx", "romanxi", "romanxii", "rootradical", "rotate", "rparent", "rrangle", "rrbracket", "rrointerval", "rtimes", "rtlhbox", "rtlvbox", "rtlvtop", "rtop", "ruby", "ruledhbox", "ruledhpack", "ruledmbox", "ruledtopv", "ruledtpack", "ruledvbox", "ruledvpack", "ruledvtop", "runMPbuffer", "runninghbox", "russianNumerals", "russiannumerals", "rvert", "sacute", "safechar", "samplefile", "sans", "sansbold", "sansnormal", "sansserif", "savebox", "savebtxdataset", "savebuffer", "savecounter", 
"savecurrentattributes", "savenormalmeaning", "savetaggedtwopassdata", "savetwopassdata", "sbox", "scale", "scaron", "scedilla", "schwa", "schwahook", "scircumflex", "scommaaccent", "screen", "searrow", "secondoffivearguments", "secondoffourarguments", "secondofsixarguments", "secondofthreearguments", "secondofthreeunexpanded", "secondoftwoarguments", "secondoftwounexpanded", "section", "sectionmark", "seeindex", "select", "selectblocks", "serializecommalist", "serializedcommalist", "serif", "serifbold", "serifnormal", "setJSpreamble", "setMPlayer", "setMPpositiongraphic", "setMPpositiongraphicrange", "setMPtext", "setMPvariable", "setMPvariables", "setautopagestaterealpageno", "setbar", "setbigbodyfont", "setboxllx", "setboxlly", "setbreakpoints", "setcapstrut", "setcatcodetable", "setcharacteralign", "setcharacteraligndetail", "setcharactercasing", "setcharactercleaning", "setcharacterkerning", "setcharacterspacing", "setcharacterstripping", "setcharstrut", "setcollector", "setcolormodell", "setcounter", "setcounterown", "setctxluafunction", "setcurrentfontclass", "setdataset", "setdatavalue", "setdefaultpenalties", "setdigitsmanipulation", "setdirection", "setdocumentargument", "setdocumentargumentdefault", "setdocumentfilename", "setdummyparameter", "setelementexporttag", "setemeasure", "setevalue", "setevariable", "setevariables", "setexpansion", "setfirstline", "setfirstpasscharacteralign", "setflag", "setfont", "setfontcolorsheme", "setfontfeature", "setfontsolution", "setfontstrut", "setfractions", "setglobalscript", "setgmeasure", "setgvalue", "setgvariable", "setgvariables", "sethboxregister", "sethyphenatedurlafter", "sethyphenatedurlbefore", "sethyphenatedurlnormal", "sethyphenationfeatures", "setinitial", "setinjector", "setinteraction", "setinterfacecommand", "setinterfaceconstant", "setinterfaceelement", "setinterfacemessage", "setinterfacevariable", "setinternalrendering", "setitaliccorrection", "setlayer", "setlayerframed", "setlayertext", 
"setlinefiller", "setlocalhsize", "setlocalscript", "setluatable", "setmainbodyfont", "setmainparbuilder", "setmarker", "setmarking", "setmathstyle", "setmeasure", "setmessagetext", "setminus", "setmode", "setnostrut", "setnote", "setnotetext", "setobject", "setoldstyle", "setpagereference", "setpagestate", "setpagestaterealpageno", "setparagraphfreezing", "setpenalties", "setpercentdimen", "setperiodkerning", "setposition", "setpositionbox", "setpositiondata", "setpositiondataplus", "setpositiononly", "setpositionplus", "setpositionstrut", "setprofile", "setrandomseed", "setreference", "setreferencedobject", "setregisterentry", "setreplacements", "setrigidcolumnbalance", "setrigidcolumnhsize", "setscript", "setsecondpasscharacteralign", "setsectionblock", "setsimplecolumnshsize", "setsmallbodyfont", "setsmallcaps", "setstackbox", "setstructurepageregister", "setstrut", "setsuperiors", "setsystemmode", "settabular", "settaggedmetadata", "settestcrlf", "settextcontent", "settightobject", "settightreferencedobject", "settightstrut", "settightunreferencedobject", "settokenlist", "settrialtypesetting", "setuevalue", "setugvalue", "setunreferencedobject", "setup", "setupMPgraphics", "setupMPinstance", "setupMPpage", "setupMPvariables", "setupTABLE", "setupTEXpage", "setupalign", "setupalternativestyles", "setuparranging", "setupattachment", "setupattachments", "setupbackend", "setupbackground", "setupbackgrounds", "setupbar", "setupbars", "setupblackrules", "setupblank", "setupbleeding", "setupblock", "setupbodyfont", "setupbodyfontenvironment", "setupbookmark", "setupbottom", "setupbottomtexts", "setupbtx", "setupbtxdataset", "setupbtxlabeltext", "setupbtxlist", "setupbtxregister", "setupbtxrendering", "setupbuffer", "setupbutton", "setupcapitals", "setupcaption", "setupcaptions", "setupcharacteralign", "setupcharacterkerning", "setupcharacterspacing", "setupchemical", "setupchemicalframed", "setupclipping", "setupcollector", "setupcolor", "setupcolors", 
"setupcolumns", "setupcolumnset", "setupcolumnsetarea", "setupcolumnsetareatext", "setupcolumnsetlines", "setupcolumnsetspan", "setupcolumnsetstart", "setupcombination", "setupcombinedlist", "setupcomment", "setupcontent", "setupcounter", "setupdataset", "setupdelimitedtext", "setupdescription", "setupdescriptions", "setupdirections", "setupdocument", "setupeffect", "setupenumeration", "setupenumerations", "setupenv", "setupexpansion", "setupexport", "setupexternalfigure", "setupexternalfigures", "setupexternalsoundtracks", "setupfacingfloat", "setupfield", "setupfieldbody", "setupfieldcategory", "setupfieldcontentframed", "setupfieldlabelframed", "setupfields", "setupfieldtotalframed", "setupfiller", "setupfillinlines", "setupfillinrules", "setupfirstline", "setupfittingpage", "setupfloat", "setupfloatframed", "setupfloats", "setupfloatsplitting", "setupfontexpansion", "setupfontprotrusion", "setupfonts", "setupfontsolution", "setupfooter", "setupfootertexts", "setupfootnotes", "setupforms", "setupformula", "setupformulae", "setupformulaframed", "setupframed", "setupframedcontent", "setupframedtable", "setupframedtablecolumn", "setupframedtablerow", "setupframedtext", "setupframedtexts", "setupglobalreferenceprefix", "setuphead", "setupheadalternative", "setupheader", "setupheadertexts", "setupheadnumber", "setupheads", "setupheadtext", "setuphelp", "setuphigh", "setuphighlight", "setuphyphenation", "setuphyphenmark", "setupindentedtext", "setupindenting", "setupindex", "setupinitial", "setupinsertion", "setupinteraction", "setupinteractionbar", "setupinteractionmenu", "setupinteractionscreen", "setupinterlinespace", "setupitaliccorrection", "setupitemgroup", "setupitemizations", "setupitemize", "setupitems", "setuplabel", "setuplabeltext", "setuplanguage", "setuplayer", "setuplayeredtext", "setuplayout", "setuplayouttext", "setuplegend", "setuplinefiller", "setuplinefillers", "setuplinenote", "setuplinenumbering", "setuplines", "setuplinetable", "setuplinewidth", 
"setuplist", "setuplistalternative", "setuplistextra", "setuplocalfloats", "setuplocalinterlinespace", "setuplow", "setuplowhigh", "setuplowmidhigh", "setupmakeup", "setupmarginblock", "setupmargindata", "setupmarginframed", "setupmarginrule", "setupmarginrules", "setupmarking", "setupmathalignment", "setupmathcases", "setupmathematics", "setupmathfence", "setupmathfraction", "setupmathfractions", "setupmathframed", "setupmathlabeltext", "setupmathmatrix", "setupmathornament", "setupmathradical", "setupmathstackers", "setupmathstyle", "setupmixedcolumns", "setupmodule", "setupmulticolumns", "setupnarrower", "setupnotation", "setupnotations", "setupnote", "setupnotes", "setupoffset", "setupoffsetbox", "setupoperatortext", "setupoppositeplacing", "setuporientation", "setupoutput", "setupoutputroutine", "setuppagechecker", "setuppagecolumns", "setuppagecomment", "setuppageinjection", "setuppageinjectionalternative", "setuppagenumber", "setuppagenumbering", "setuppageshift", "setuppagestate", "setuppagetransitions", "setuppairedbox", "setuppalet", "setuppaper", "setuppapersize", "setupparagraph", "setupparagraphintro", "setupparagraphnumbering", "setupparagraphs", "setupparallel", "setupperiodkerning", "setupperiods", "setupplaceholder", "setupplacement", "setuppositionbar", "setuppositioning", "setupprefixtext", "setupprocessor", "setupprofile", "setupprograms", "setupquotation", "setupquote", "setuprealpagenumber", "setupreferenceformat", "setupreferenceprefix", "setupreferencestructureprefix", "setupreferencing", "setupregister", "setupregisters", "setuprenderingwindow", "setuprotate", "setupruby", "setups", "setupscale", "setupscript", "setupscripts", "setupsectionblock", "setupselector", "setupshift", "setupsidebar", "setupsorting", "setupspacing", "setupspellchecking", "setupstartstop", "setupstretched", "setupstrut", "setupstyle", "setupsubformula", "setupsubformulas", "setupsubpagenumber", "setupsuffixtext", "setupsymbols", "setupsymbolset", "setupsynctex", 
"setupsynonyms", "setupsystem", "setuptables", "setuptabulate", "setuptagging", "setuptaglabeltext", "setuptext", "setuptextbackground", "setuptextflow", "setuptextnote", "setuptextrules", "setuptexttexts", "setupthinrules", "setuptolerance", "setuptooltip", "setuptop", "setuptoptexts", "setuptype", "setuptyping", "setupunit", "setupunittext", "setupurl", "setupuserdata", "setupuserdataalternative", "setupuserpagenumber", "setupversion", "setupviewerlayer", "setupvspacing", "setupwhitespace", "setupwithargument", "setupwithargumentswapped", "setupxml", "setupxtable", "setuvalue", "setuxvalue", "setvalue", "setvariable", "setvariables", "setvboxregister", "setvisualizerfont", "setvtopregister", "setwidthof", "setxmeasure", "setxvalue", "setxvariable", "setxvariables", "seveneighths", "sfrac", "shapedhbox", "sharp", "shiftbox", "shiftdown", "shiftup", "showallmakeup", "showattributes", "showbodyfont", "showbodyfontenvironment", "showboxes", "showbtxdatasetauthors", "showbtxdatasetcompleteness", "showbtxdatasetfields", "showbtxfields", "showbtxhashedauthors", "showbtxtables", "showchardata", "showcharratio", "showcolor", "showcolorbar", "showcolorcomponents", "showcolorgroup", "showcolorset", "showcolorstruts", "showcounter", "showdirectives", "showdirsinmargin", "showedebuginfo", "showexperiments", "showfont", "showfontdata", "showfontexpansion", "showfontitalics", "showfontkerns", "showfontparameters", "showfontstrip", "showfontstyle", "showframe", "showglyphdata", "showglyphs", "showgrid", "showgridsnapping", "showhelp", "showhyphenationtrace", "showhyphens", "showinjector", "showjustification", "showkerning", "showlayout", "showlayoutcomponents", "showligature", "showligatures", "showlogcategories", "showluatables", "showmakeup", "showmargins", "showmessage", "showminimalbaseline", "shownextbox", "showotfcomposition", "showpalet", "showparentchain", "showparwrapperstate", "showprint", "showsetups", "showsetupsdefinition", "showstruts", "showsymbolset", 
"showtimer", "showtokens", "showtrackers", "showvalue", "showvariable", "showwarning", "sigma", "signalrightpage", "sim", "simeq", "simplealignedbox", "simplealignedboxplus", "simplealignedspreadbox", "simplegroupedcommand", "simplereversealignedbox", "simplereversealignedboxplus", "singalcharacteralign", "singlebond", "singleverticalbar", "sixperemspace", "sixthofsixarguments", "slanted", "slantedbold", "slantedface", "slash", "slicepages", "slong", "slovenianNumerals", "sloveniannumerals", "small", "smallbodyfont", "smallbold", "smallbolditalic", "smallboldslanted", "smallcappedcharacters", "smallcappedromannumerals", "smallcaps", "smaller", "smallitalicbold", "smallnormal", "smallskip", "smallslanted", "smallslantedbold", "smalltype", "smash", "smashbox", "smashboxed", "smashedhbox", "smashedvbox", "smile", "snaptogrid", "softhyphen", "solidus", "someheadnumber", "somekindoftab", "someline", "somelocalfloat", "somenamedheadnumber", "someplace", "somewhere", "space", "spaceddigits", "spaceddigitsmethod", "spaceddigitsseparator", "spaceddigitssymbol", "spadesuit", "spanishNumerals", "spanishnumerals", "speech", "sphericalangle", "splitatasterisk", "splitatcolon", "splitatcolons", "splitatcomma", "splitatperiod", "splitdfrac", "splitfilename", "splitfloat", "splitfrac", "splitoffbase", "splitofffull", "splitoffkind", "splitoffname", "splitoffpath", "splitoffroot", "splitofftokens", "splitofftype", "splitstring", "spreadhbox", "sqcap", "sqcup", "sqrt", "sqsubset", "sqsubseteq", "sqsubsetneq", "sqsupset", "sqsupseteq", "sqsupsetneq", "square", "squaredots", "ssharp", "stackrel", "stackscripts", "star", "stareq", "startJScode", "startJSpreamble", "startLUA", "startMP", "startMPclip", "startMPcode", "startMPdefinitions", "startMPdrawing", "startMPenvironment", "startMPextensions", "startMPinclusions", "startMPinitializations", "startMPpage", "startMPpositiongraphic", "startMPpositionmethod", "startMPrun", "startPARSEDXML", "startTABLE", "startTABLEbody", 
"startTABLEfoot", "startTABLEhead", "startTABLEnested", "startTABLEnext", "startTC", "startTD", "startTDs", "startTEX", "startTEXpage", "startTH", "startTN", "startTR", "startTRs", "startTX", "startTY", "startXML", "startalign", "startalignment", "startallmodes", "startappendices", "startarrangedpages", "startasciimode", "startaside", "startattachment", "startbackground", "startbackmatter", "startbar", "startbbordermatrix", "startbitmapimage", "startblockquote", "startbodymatter", "startbordermatrix", "startboxedcolumns", "startbtxlabeltext", "startbtxrenderingdefinitions", "startbuffer", "startcases", "startcatcodetable", "startcenteraligned", "startchapter", "startcharacteralign", "startcheckedfences", "startchemical", "startchemicaltext", "startcollect", "startcollecting", "startcolor", "startcolorintent", "startcoloronly", "startcolorset", "startcolumns", "startcolumnset", "startcolumnsetspan", "startcombination", "startcomment", "startcomponent", "startcontextcode", "startcontextdefinitioncode", "startctxfunction", "startctxfunctiondefinition", "startcurrentcolor", "startcurrentlistentrywrapper", "startdelimited", "startdelimitedtext", "startdisplaymath", "startdmath", "startdocument", "starteffect", "startelement", "startembeddedxtable", "startendnote", "startendofline", "startenvironment", "startexceptions", "startexpanded", "startexpandedcollect", "startextendedcatcodetable", "startexternalfigurecollection", "startfacingfloat", "startfact", "startfigure", "startfiguretext", "startfittingpage", "startfixed", "startfloatcombination", "startfont", "startfontclass", "startfontsolution", "startfootnote", "startformula", "startformulas", "startframed", "startframedcell", "startframedcontent", "startframedrow", "startframedtable", "startframedtext", "startfrontmatter", "startgoto", "startgraphictext", "startgridsnapping", "starthanging", "starthbox", "starthboxestohbox", "starthboxregister", "starthead", "startheadtext", "starthelptext", "starthiding", 
"starthighlight", "starthyphenation", "startimath", "startindentation", "startindentedtext", "startinteraction", "startinteractionmenu", "startinterface", "startintermezzotext", "startintertext", "startitem", "startitemgroup", "startitemgroupcolumns", "startitemize", "startknockout", "startlabeltext", "startlanguage", "startlayout", "startleftaligned", "startlegend", "startline", "startlinealignment", "startlinecorrection", "startlinefiller", "startlinenote", "startlinenumbering", "startlines", "startlinetable", "startlinetablebody", "startlinetablecell", "startlinetablehead", "startlocalfootnotes", "startlocalheadsetup", "startlocallinecorrection", "startlocalnotes", "startlocalsetups", "startlua", "startluacode", "startluaparameterset", "startluasetups", "startmakeup", "startmarginblock", "startmarginrule", "startmarkedcontent", "startmarkpages", "startmathalignment", "startmathcases", "startmathlabeltext", "startmathmatrix", "startmathmode", "startmathstyle", "startmatrices", "startmatrix", "startmaxaligned", "startmdformula", "startmidaligned", "startmiddlealigned", "startmiddlemakeup", "startmixedcolumns", "startmode", "startmodeset", "startmodule", "startmoduletestsection", "startmpformula", "startmulticolumns", "startnamedsection", "startnamedsubformulas", "startnarrow", "startnarrower", "startnegative", "startnicelyfilledbox", "startnointerference", "startnotallmodes", "startnotext", "startnotmode", "startoperatortext", "startopposite", "startoutputstream", "startoverlay", "startoverprint", "startpacked", "startpagecolumns", "startpagecomment", "startpagefigure", "startpagelayout", "startpagemakeup", "startpar", "startparagraph", "startparagraphs", "startparagraphscell", "startparbuilder", "startpart", "startpath", "startplacechemical", "startplacefigure", "startplacefloat", "startplaceformula", "startplacegraphic", "startplaceintermezzo", "startplacelegend", "startplacepairedbox", "startplacetable", "startpositioning", "startpositionoverlay", 
"startpositive", "startpostponing", "startpostponingnotes", "startprefixtext", "startprocessassignmentcommand", "startprocessassignmentlist", "startprocesscommacommand", "startprocesscommalist", "startproduct", "startproject", "startprotect", "startprotectedcolors", "startpublication", "startpunctuation", "startquotation", "startquote", "startrandomized", "startrandomseed", "startrawsetups", "startreadingfile", "startreferenceprefix", "startregime", "startregister", "startreusableMPgraphic", "startrightaligned", "startruby", "startscript", "startsdformula", "startsection", "startsectionblock", "startsectionblockenvironment", "startsectionlevel", "startsetups", "startshapebox", "startshift", "startsidebar", "startsimplecolumns", "startspecialitem", "startspeech", "startspformula", "startsplitformula", "startsplittext", "startspread", "startstandardmakeup", "startstaticMPfigure", "startstaticMPgraphic", "startstrictinspectnextcharacter", "startstructurepageregister", "startstrut", "startstyle", "startsubformulas", "startsubject", "startsubjectlevel", "startsubsection", "startsubsentence", "startsubstack", "startsubsubject", "startsubsubsection", "startsubsubsubject", "startsubsubsubsection", "startsubsubsubsubject", "startsubsubsubsubsection", "startsubsubsubsubsubject", "startsuffixtext", "startsymbolset", "starttable", "starttablehead", "starttables", "starttabletail", "starttabletext", "starttabulate", "starttabulatehead", "starttabulatetail", "starttagged", "starttaglabeltext", "starttexcode", "starttexdefinition", "starttext", "starttextbackground", "starttextbackgroundmanual", "starttextcolor", "starttextcolorintent", "starttextflow", "starttextmakeup", "starttextrule", "startthematrix", "starttitle", "starttokenlist", "starttokens", "starttransparent", "starttypescript", "starttypescriptcollection", "starttyping", "startuniqueMPgraphic", "startuniqueMPpagegraphic", "startunittext", "startunpacked", "startusableMPgraphic", "startuseMPgraphic", 
"startusemathstyleparameter", "startuserdata", "startusingbtxspecification", "startvbox", "startvboxregister", "startvboxtohbox", "startvboxtohboxseparator", "startviewerlayer", "startvtop", "startvtopregister", "startxcell", "startxcellgroup", "startxcolumn", "startxgroup", "startxmldisplayverbatim", "startxmlinlineverbatim", "startxmlraw", "startxmlsetups", "startxrow", "startxrowgroup", "startxtable", "startxtablebody", "startxtablefoot", "startxtablehead", "startxtablenext", "stligature", "stopJScode", "stopJSpreamble", "stopLUA", "stopMP", "stopMPclip", "stopMPcode", "stopMPdefinitions", "stopMPdrawing", "stopMPenvironment", "stopMPextensions", "stopMPinclusions", "stopMPinitializations", "stopMPpage", "stopMPpositiongraphic", "stopMPpositionmethod", "stopMPrun", "stopPARSEDXML", "stopTABLE", "stopTABLEbody", "stopTABLEfoot", "stopTABLEhead", "stopTABLEnested", "stopTABLEnext", "stopTC", "stopTD", "stopTDs", "stopTEX", "stopTEXpage", "stopTH", "stopTN", "stopTR", "stopTRs", "stopTX", "stopTY", "stopXML", "stopalign", "stopalignment", "stopallmodes", "stopappendices", "stoparrangedpages", "stopasciimode", "stopaside", "stopattachment", "stopbackground", "stopbackmatter", "stopbar", "stopbbordermatrix", "stopbitmapimage", "stopblockquote", "stopbodymatter", "stopbordermatrix", "stopboxedcolumns", "stopbtxlabeltext", "stopbtxrenderingdefinitions", "stopbuffer", "stopcases", "stopcatcodetable", "stopcenteraligned", "stopchapter", "stopcharacteralign", "stopcheckedfences", "stopchemical", "stopchemicaltext", "stopcollect", "stopcollecting", "stopcolor", "stopcolorintent", "stopcoloronly", "stopcolorset", "stopcolumns", "stopcolumnset", "stopcolumnsetspan", "stopcombination", "stopcomment", "stopcomponent", "stopcontextcode", "stopcontextdefinitioncode", "stopctxfunction", "stopctxfunctiondefinition", "stopcurrentcolor", "stopcurrentlistentrywrapper", "stopdelimited", "stopdelimitedtext", "stopdisplaymath", "stopdmath", "stopdocument", "stopeffect", "stopelement", 
"stopembeddedxtable", "stopendnote", "stopendofline", "stopenvironment", "stopexceptions", "stopexpanded", "stopexpandedcollect", "stopextendedcatcodetable", "stopexternalfigurecollection", "stopfacingfloat", "stopfact", "stopfigure", "stopfiguretext", "stopfittingpage", "stopfixed", "stopfloatcombination", "stopfont", "stopfontclass", "stopfontsolution", "stopfootnote", "stopformula", "stopformulas", "stopframed", "stopframedcell", "stopframedcontent", "stopframedrow", "stopframedtable", "stopframedtext", "stopfrontmatter", "stopgoto", "stopgraphictext", "stopgridsnapping", "stophanging", "stophbox", "stophboxestohbox", "stophboxregister", "stophead", "stopheadtext", "stophelptext", "stophiding", "stophighlight", "stophyphenation", "stopimath", "stopindentation", "stopindentedtext", "stopinteraction", "stopinteractionmenu", "stopinterface", "stopintermezzotext", "stopintertext", "stopitem", "stopitemgroup", "stopitemgroupcolumns", "stopitemize", "stopknockout", "stoplabeltext", "stoplanguage", "stoplayout", "stopleftaligned", "stoplegend", "stopline", "stoplinealignment", "stoplinecorrection", "stoplinefiller", "stoplinenote", "stoplinenumbering", "stoplines", "stoplinetable", "stoplinetablebody", "stoplinetablecell", "stoplinetablehead", "stoplocalfootnotes", "stoplocalheadsetup", "stoplocallinecorrection", "stoplocalnotes", "stoplocalsetups", "stoplua", "stopluacode", "stopluaparameterset", "stopluasetups", "stopmakeup", "stopmarginblock", "stopmarginrule", "stopmarkedcontent", "stopmarkpages", "stopmathalignment", "stopmathcases", "stopmathlabeltext", "stopmathmatrix", "stopmathmode", "stopmathstyle", "stopmatrices", "stopmatrix", "stopmaxaligned", "stopmdformula", "stopmidaligned", "stopmiddlealigned", "stopmiddlemakeup", "stopmixedcolumns", "stopmode", "stopmodeset", "stopmodule", "stopmoduletestsection", "stopmpformula", "stopmulticolumns", "stopnamedsection", "stopnamedsubformulas", "stopnarrow", "stopnarrower", "stopnegative", "stopnicelyfilledbox", 
"stopnointerference", "stopnotallmodes", "stopnotext", "stopnotmode", "stopoperatortext", "stopopposite", "stopoutputstream", "stopoverlay", "stopoverprint", "stoppacked", "stoppagecolumns", "stoppagecomment", "stoppagefigure", "stoppagelayout", "stoppagemakeup", "stoppar", "stopparagraph", "stopparagraphs", "stopparagraphscell", "stopparbuilder", "stoppart", "stoppath", "stopplacechemical", "stopplacefigure", "stopplacefloat", "stopplaceformula", "stopplacegraphic", "stopplaceintermezzo", "stopplacelegend", "stopplacepairedbox", "stopplacetable", "stoppositioning", "stoppositionoverlay", "stoppositive", "stoppostponing", "stoppostponingnotes", "stopprefixtext", "stopprocessassignmentcommand", "stopprocessassignmentlist", "stopprocesscommacommand", "stopprocesscommalist", "stopproduct", "stopproject", "stopprotect", "stopprotectedcolors", "stoppublication", "stoppunctuation", "stopquotation", "stopquote", "stoprandomized", "stoprandomseed", "stoprawsetups", "stopreadingfile", "stopreferenceprefix", "stopregime", "stopreusableMPgraphic", "stoprightaligned", "stopruby", "stopscript", "stopsdformula", "stopsection", "stopsectionblock", "stopsectionblockenvironment", "stopsectionlevel", "stopsetups", "stopshapebox", "stopshift", "stopsidebar", "stopsimplecolumns", "stopspecialitem", "stopspeech", "stopspformula", "stopsplitformula", "stopsplittext", "stopspread", "stopstandardmakeup", "stopstaticMPfigure", "stopstaticMPgraphic", "stopstrictinspectnextcharacter", "stopstrut", "stopstyle", "stopsubformulas", "stopsubject", "stopsubjectlevel", "stopsubsection", "stopsubsentence", "stopsubstack", "stopsubsubject", "stopsubsubsection", "stopsubsubsubject", "stopsubsubsubsection", "stopsubsubsubsubject", "stopsubsubsubsubsection", "stopsubsubsubsubsubject", "stopsuffixtext", "stopsymbolset", "stoptable", "stoptablehead", "stoptables", "stoptabletail", "stoptabletext", "stoptabulate", "stoptabulatehead", "stoptabulatetail", "stoptagged", "stoptaglabeltext", "stoptexcode", 
"stoptexdefinition", "stoptext", "stoptextbackground", "stoptextbackgroundmanual", "stoptextcolor", "stoptextcolorintent", "stoptextflow", "stoptextmakeup", "stoptextrule", "stopthematrix", "stoptitle", "stoptokenlist", "stoptokens", "stoptransparent", "stoptypescript", "stoptypescriptcollection", "stoptyping", "stopuniqueMPgraphic", "stopuniqueMPpagegraphic", "stopunittext", "stopunpacked", "stopusableMPgraphic", "stopuseMPgraphic", "stopusemathstyleparameter", "stopuserdata", "stopusingbtxspecification", "stopvbox", "stopvboxregister", "stopvboxtohbox", "stopvboxtohboxseparator", "stopviewerlayer", "stopvtop", "stopvtopregister", "stopxcell", "stopxcellgroup", "stopxcolumn", "stopxgroup", "stopxmldisplayverbatim", "stopxmlinlineverbatim", "stopxmlraw", "stopxmlsetups", "stopxrow", "stopxrowgroup", "stopxtable", "stopxtablebody", "stopxtablefoot", "stopxtablehead", "stopxtablenext", "stretched", "strictdoifelsenextoptional", "strictdoifnextoptionalelse", "stripcharacter", "strippedcsname", "stripspaces", "structurelistuservariable", "structurenumber", "structuretitle", "structureuservariable", "structurevariable", "strut", "strutdp", "strutgap", "strutht", "struthtdp", "struttedbox", "strutwd", "style", "styleinstance", "subject", "subpagenumber", "subsection", "subsentence", "subset", "subseteq", "subseteqq", "subsetneq", "subsetneqq", "substituteincommalist", "subsubject", "subsubsection", "subsubsubject", "subsubsubsection", "subsubsubsubject", "subsubsubsubsection", "subsubsubsubsubject", "subtractfeature", "succ", "succapprox", "succcurlyeq", "succeq", "succeqq", "succnapprox", "succneq", "succneqq", "succnsim", "succsim", "suffixlanguage", "suffixtext", "sum", "supset", "supseteq", "supseteqq", "supsetneq", "supsetneqq", "surd", "surdradical", "swapcounts", "swapdimens", "swapface", "swapmacros", "swaptypeface", "swarrow", "switchstyleonly", "switchtobodyfont", "switchtocolor", "switchtointerlinespace", "symbol", "symbolreference", "synchronizeblank", 
"synchronizeindenting", "synchronizemarking", "synchronizeoutputstreams", "synchronizestrut", "synchronizewhitespace", "synctexblockfilename", "synctexresetfilename", "synctexsetfilename", "systemlog", "systemlogfirst", "systemloglast", "systemsetups", "tLeftarrow", "tLeftrightarrow", "tRightarrow", "tabulateautoline", "tabulateautorule", "tabulateline", "tabulaterule", "taggedctxcommand", "taggedlabeltexts", "taglabellanguage", "taglabeltext", "tau", "tbinom", "tbox", "tcaron", "tcedilla", "tcommaaccent", "tcurl", "tequal", "test", "testandsplitstring", "testcolumn", "testfeature", "testfeatureonce", "testpage", "testpageonly", "testpagesync", "testtokens", "tex", "texdefinition", "texsetup", "textAngstrom", "textacute", "textampersand", "textasciicircum", "textasciitilde", "textat", "textbackslash", "textbar", "textbottomcomma", "textbottomdot", "textbraceleft", "textbraceright", "textbreve", "textbrokenbar", "textbullet", "textcaron", "textcedilla", "textcelsius", "textcent", "textcircledP", "textcircumflex", "textcitation", "textcite", "textcomma", "textcontrolspace", "textcurrency", "textdag", "textddag", "textdegree", "textdiaeresis", "textdiv", "textdollar", "textdong", "textdotaccent", "textellipsis", "texteuro", "textflowcollector", "textfraction", "textgrave", "texthash", "texthorizontalbar", "texthungarumlaut", "texthyphen", "textkelvin", "textlognot", "textmacron", "textmath", "textmho", "textminus", "textmu", "textmultiply", "textnumero", "textogonek", "textohm", "textormathchar", "textormathchars", "textounce", "textpercent", "textperiod", "textplus", "textpm", "textreference", "textring", "textrule", "textslash", "textsterling", "texttilde", "textunderscore", "textvisiblespace", "textyen", "thai", "thainumerals", "thedatavalue", "thefirstcharacter", "thematrix", "thenormalizedbodyfontsize", "theorientation", "therefore", "theremainingcharacters", "theta", "thickspace", "thinrule", "thinrules", "thinspace", "thirdoffivearguments", 
"thirdoffourarguments", "thirdofsixarguments", "thirdofthreearguments", "thirdofthreeunexpanded", "thook", "thookleftarrow", "thookrightarrow", "thorn", "threedigitrounding", "threeeighths", "threefifths", "threeperemspace", "threequarter", "threesuperior", "tibetannumerals", "tightlayer", "tilde", "times", "tinyfont", "title", "tlap", "tleftarrow", "tleftharpoondown", "tleftharpoonup", "tleftrightarrow", "tleftrightharpoons", "tmapsto", "to", "tochar", "tolinenote", "tooltip", "top", "topbox", "topleftbox", "toplinebox", "toprightbox", "topskippedbox", "tracecatcodetables", "tracedfontname", "tracedpagestate", "traceoutputroutines", "tracepositions", "trademark", "translate", "transparencycomponents", "transparent", "trel", "triangle", "triangledown", "triangleleft", "triangleq", "triangleright", "trightarrow", "trightharpoondown", "trightharpoonup", "trightleftharpoons", "trightoverleftarrow", "triplebond", "tripleprime", "tripleverticalbar", "truefilename", "truefontname", "tstroke", "ttraggedright", "ttriplerel", "ttwoheadleftarrow", "ttwoheadrightarrow", "turnediota", "twodigitrounding", "twofifths", "twoheaddownarrow", "twoheadleftarrow", "twoheadrightarrow", "twoheadrightarrowtail", "twoheaduparrow", "twosuperior", "twothirds", "tx", "txx", "typ", "type", "typebuffer", "typedefinedbuffer", "typeface", "typefile", "typeinlinebuffer", "typescriptone", "typescriptprefix", "typescriptthree", "typescripttwo", "typesetbuffer", "typesetbufferonly", "typesetfile", "uacute", "ubreve", "ucaron", "uchexnumber", "uchexnumbers", "ucircumflex", "uconvertnumber", "udiaeresis", "udiaeresisacute", "udiaeresiscaron", "udiaeresisgrave", "udiaeresismacron", "udotbelow", "udots", "udoublegrave", "uedcatcodecommand", "ugrave", "uhook", "uhorn", "uhornacute", "uhorndotbelow", "uhorngrave", "uhornhook", "uhorntilde", "uhungarumlaut", "uinvertedbreve", "ulcorner", "umacron", "undefinevalue", "undepthed", "underbar", "underbars", "underbartext", "underbrace", "underbracetext", 
"underbracket", "underbrackettext", "underdash", "underdashes", "underdot", "underdots", "underleftarrow", "underleftharpoondown", "underleftharpoonup", "underleftrightarrow", "underparent", "underparenttext", "underrandom", "underrandoms", "underrightarrow", "underrightharpoondown", "underrightharpoonup", "underset", "understrike", "understrikes", "undertwoheadleftarrow", "undertwoheadrightarrow", "undoassign", "unexpandeddocumentvariable", "unframed", "unhhbox", "unihex", "uniqueMPgraphic", "uniqueMPpagegraphic", "unit", "unitlanguage", "unitshigh", "unitslow", "unittext", "unknown", "unprotected", "unregisterhyphenationpattern", "unregisterparwrapper", "unspaceafter", "unspaceargument", "unspaced", "unspacestring", "unstackscripts", "untexargument", "untexcommand", "uogonek", "upand", "uparrow", "updasharrow", "updateparagraphdemerits", "updateparagraphpenalties", "updateparagraphproperties", "updateparagraphshapes", "updownarrow", "updownarrowbar", "updownarrows", "upharpoonleft", "upharpoonright", "uplus", "uppercased", "uppercasestring", "uppercasing", "upperleftdoubleninequote", "upperleftdoublesixquote", "upperleftsingleninequote", "upperleftsinglesixquote", "upperrightdoubleninequote", "upperrightdoublesixquote", "upperrightsingleninequote", "upperrightsinglesixquote", "upsilon", "upuparrows", "upwhitearrow", "urcorner", "uring", "url", "useJSscripts", "useMPenvironmentbuffer", "useMPgraphic", "useMPlibrary", "useMPrun", "useMPvariables", "useURL", "usealignparameter", "useblankparameter", "useblocks", "usebodyfont", "usebodyfontparameter", "usebtxdataset", "usebtxdefinitions", "usecitation", "usecolors", "usecomponent", "usedirectory", "usedummycolorparameter", "usedummystyleandcolor", "usedummystyleparameter", "useenvironment", "useexternaldocument", "useexternalfigure", "useexternalrendering", "useexternalsoundtrack", "usefigurebase", "usefile", "usefontpath", "usegridparameter", "usehyphensparameter", "useindentingparameter", "useindentnextparameter", 
"useinterlinespaceparameter", "uselanguageparameter", "useluamodule", "useluatable", "usemathstyleparameter", "usemodule", "useproduct", "useprofileparameter", "useproject", "usereferenceparameter", "userpagenumber", "usesetupsparameter", "usestaticMPfigure", "usesubpath", "usesymbols", "usetexmodule", "usetypescript", "usetypescriptfile", "useurl", "usezipfile", "utfchar", "utflower", "utfupper", "utilde", "utilityregisterlength", "vDash", "validassignment", "varTheta", "varepsilon", "varkappa", "varnothing", "varphi", "varpi", "varrho", "varsigma", "vartheta", "vboxreference", "vdash", "vdots", "vec", "vee", "veebar", "veeeq", "verbatim", "verbatimstring", "verbosenumber", "version", "vert", "verticalgrowingbar", "verticalpositionbar", "veryraggedcenter", "veryraggedleft", "veryraggedright", "vglue", "viewerlayer", "vl", "vpackbox", "vpackedbox", "vphantom", "vpos", "vsmash", "vsmashbox", "vsmashed", "vspace", "vspacing", "wcircumflex", "wdofstring", "wedge", "wedgeeq", "weekday", "whitearrowupfrombar", "widehat", "widetilde", "widthofstring", "widthspanningtext", "withoutpt", "word", "wordright", "words", "wordtonumber", "wp", "wr", "writebetweenlist", "writedatatolist", "writestatus", "writetolist", "xLeftarrow", "xLeftrightarrow", "xRightarrow", "xdefconvertedargument", "xequal", "xfrac", "xhookleftarrow", "xhookrightarrow", "xi", "xleftarrow", "xleftharpoondown", "xleftharpoonup", "xleftrightarrow", "xleftrightharpoons", "xmapsto", "xmladdindex", "xmlafterdocumentsetup", "xmlaftersetup", "xmlall", "xmlappenddocumentsetup", "xmlappendsetup", "xmlapplyselectors", "xmlatt", "xmlattdef", "xmlattribute", "xmlattributedef", "xmlbadinclusions", "xmlbeforedocumentsetup", "xmlbeforesetup", "xmlchainatt", "xmlchainattdef", "xmlchecknamespace", "xmlcommand", "xmlconcat", "xmlconcatrange", "xmlcontext", "xmlcount", "xmldefaulttotext", "xmldepth", "xmldirectives", "xmldirectivesafter", "xmldirectivesbefore", "xmldisplayverbatim", "xmldoif", "xmldoifatt", "xmldoifelse", 
"xmldoifelseatt", "xmldoifelseempty", "xmldoifelseselfempty", "xmldoifelsetext", "xmldoifelsevalue", "xmldoifnot", "xmldoifnotatt", "xmldoifnotselfempty", "xmldoifnottext", "xmldoifselfempty", "xmldoiftext", "xmlelement", "xmlfilter", "xmlfirst", "xmlflush", "xmlflushcontext", "xmlflushdocumentsetups", "xmlflushlinewise", "xmlflushpure", "xmlflushspacewise", "xmlflushtext", "xmlinclude", "xmlinclusion", "xmlinclusions", "xmlinfo", "xmlinjector", "xmlinlineprettyprint", "xmlinlineprettyprinttext", "xmlinlineverbatim", "xmlinstalldirective", "xmllast", "xmllastatt", "xmllastmatch", "xmllastpar", "xmlloadbuffer", "xmlloaddata", "xmlloaddirectives", "xmlloadfile", "xmlloadonly", "xmlmain", "xmlmapvalue", "xmlname", "xmlnamespace", "xmlnonspace", "xmlpar", "xmlparam", "xmlpath", "xmlpos", "xmlposition", "xmlprependdocumentsetup", "xmlprependsetup", "xmlprettyprint", "xmlprettyprinttext", "xmlprocessbuffer", "xmlprocessdata", "xmlprocessfile", "xmlpure", "xmlraw", "xmlrefatt", "xmlregistereddocumentsetups", "xmlregisteredsetups", "xmlregisterns", "xmlremapname", "xmlremapnamespace", "xmlremovedocumentsetup", "xmlremovesetup", "xmlresetdocumentsetups", "xmlresetinjectors", "xmlresetsetups", "xmlsave", "xmlsetatt", "xmlsetattribute", "xmlsetentity", "xmlsetfunction", "xmlsetinjectors", "xmlsetpar", "xmlsetparam", "xmlsetsetup", "xmlsetup", "xmlshow", "xmlsnippet", "xmlstrip", "xmlstripnolines", "xmlstripped", "xmlstrippednolines", "xmltag", "xmltexentity", "xmltext", "xmltobuffer", "xmltobufferverbose", "xmltofile", "xmlvalue", "xmlverbatim", "xrel", "xrightarrow", "xrightharpoondown", "xrightharpoonup", "xrightleftharpoons", "xrightoverleftarrow", "xsplitstring", "xtriplerel", "xtwoheadleftarrow", "xtwoheadrightarrow", "xxfrac", "xypos", "yacute", "ycircumflex", "ydiaeresis", "ydotbelow", "yen", "ygrave", "yhook", "yiddishnumerals", "ymacron", "ytilde", "zacute", "zcaron", "zdotaccent", "zeronumberconversion", "zerowidthnobreakspace", "zerowidthspace", "zeta", "zhook", 
"zstroke", "zwj", "zwnj" },
- ["en"]={},
-} \ No newline at end of file
diff --git a/context/data/textadept/context/data/scite-context-data-metafun.lua b/context/data/textadept/context/data/scite-context-data-metafun.lua
deleted file mode 100644
index eea8198bc..000000000
--- a/context/data/textadept/context/data/scite-context-data-metafun.lua
+++ /dev/null
@@ -1,4 +0,0 @@
-return {
- ["commands"]={ "loadfile", "loadimage", "loadmodule", "dispose", "nothing", "transparency", "tolist", "topath", "tocycle", "sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian", "tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos", "invsin", "invcos", "invtan", "acosh", "asinh", "sinh", "cosh", "tanh", "zmod", "paired", "tripled", "unitcircle", "fulldiamond", "unitdiamond", "fullsquare", "unittriangle", "fulltriangle", "llcircle", "lrcircle", "urcircle", "ulcircle", "tcircle", "bcircle", "lcircle", "rcircle", "lltriangle", "lrtriangle", "urtriangle", "ultriangle", "uptriangle", "downtriangle", "lefttriangle", "righttriangle", "triangle", "smoothed", "cornered", "superellipsed", "randomized", "randomizedcontrols", "squeezed", "enlonged", "shortened", "punked", "curved", "unspiked", "simplified", "blownup", "stretched", "enlarged", "leftenlarged", "topenlarged", "rightenlarged", "bottomenlarged", "crossed", "laddered", "randomshifted", "interpolated", "perpendicular", "paralleled", "cutends", "peepholed", "llenlarged", "lrenlarged", "urenlarged", "ulenlarged", "llmoved", "lrmoved", "urmoved", "ulmoved", "rightarrow", "leftarrow", "centerarrow", "drawdoublearrows", "boundingbox", "innerboundingbox", "outerboundingbox", "pushboundingbox", "popboundingbox", "boundingradius", "boundingcircle", "boundingpoint", "crossingunder", "insideof", "outsideof", "bottomboundary", "leftboundary", "topboundary", "rightboundary", "xsized", "ysized", "xysized", "sized", "xyscaled", "intersection_point", "intersection_found", "penpoint", "bbwidth", "bbheight", "withshade", "withcircularshade", "withlinearshade", "defineshade", "shaded", "shadedinto", "withshadecolors", "withshadedomain", "withshademethod", "withshadefactor", "withshadevector", "withshadecenter", "withshadedirection", "withshaderadius", "withshadetransform", "withshadecenterone", "withshadecentertwo", "withshadestep", "withshadefraction", "withshadeorigin", "shownshadevector", "shownshadeorigin", 
"shownshadedirection", "shownshadecenter", "cmyk", "spotcolor", "multitonecolor", "namedcolor", "drawfill", "undrawfill", "inverted", "uncolored", "softened", "grayed", "greyed", "onlayer", "along", "graphictext", "loadfigure", "externalfigure", "figure", "register", "outlinetext", "filloutlinetext", "drawoutlinetext", "outlinetexttopath", "checkedbounds", "checkbounds", "strut", "rule", "withmask", "bitmapimage", "colordecimals", "ddecimal", "dddecimal", "ddddecimal", "colordecimalslist", "textext", "thetextext", "rawtextext", "textextoffset", "texbox", "thetexbox", "rawtexbox", "istextext", "rawmadetext", "validtexbox", "onetimetextext", "rawfmttext", "thefmttext", "fmttext", "onetimefmttext", "notcached", "keepcached", "verbatim", "thelabel", "label", "autoalign", "transparent", "withtransparency", "withopacity", "property", "properties", "withproperties", "asgroup", "withpattern", "withpatternscale", "withpatternfloat", "infont", "space", "crlf", "dquote", "percent", "SPACE", "CRLF", "DQUOTE", "PERCENT", "grayscale", "greyscale", "withgray", "withgrey", "colorpart", "colorlike", "readfile", "clearxy", "unitvector", "center", "epsed", "anchored", "originpath", "infinite", "break", "xstretched", "ystretched", "snapped", "pathconnectors", "function", "constructedfunction", "constructedpath", "constructedpairs", "straightfunction", "straightpath", "straightpairs", "curvedfunction", "curvedpath", "curvedpairs", "evenly", "oddly", "condition", "pushcurrentpicture", "popcurrentpicture", "arrowpath", "resetarrows", "tensecircle", "roundedsquare", "colortype", "whitecolor", "blackcolor", "basiccolors", "complementary", "complemented", "resolvedcolor", "normalfill", "normaldraw", "visualizepaths", "detailpaths", "naturalizepaths", "drawboundary", "drawwholepath", "drawpathonly", "visualizeddraw", "visualizedfill", "detaileddraw", "draworigin", "drawboundingbox", "drawpath", "drawpoint", "drawpoints", "drawcontrolpoints", "drawcontrollines", "drawpointlabels", 
"drawlineoptions", "drawpointoptions", "drawcontroloptions", "drawlabeloptions", "draworiginoptions", "drawboundoptions", "drawpathoptions", "resetdrawoptions", "undashed", "pencilled", "decorated", "redecorated", "undecorated", "passvariable", "passarrayvariable", "tostring", "topair", "format", "formatted", "quotation", "quote", "startpassingvariable", "stoppassingvariable", "eofill", "eoclip", "nofill", "dofill", "fillup", "eofillup", "nodraw", "dodraw", "area", "addbackground", "shadedup", "shadeddown", "shadedleft", "shadedright", "sortlist", "copylist", "shapedlist", "listtocurves", "listtolines", "listsize", "listlast", "uniquelist", "circularpath", "squarepath", "linearpath", "theoffset", "texmode", "systemmode", "texvar", "texstr", "isarray", "prefix", "dimension", "getmacro", "getdimen", "getcount", "gettoks", "setmacro", "setdimen", "setcount", "settoks", "setglobalmacro", "setglobaldimen", "setglobalcount", "setglobaltoks", "positionpath", "positioncurve", "positionxy", "positionparagraph", "positioncolumn", "positionwhd", "positionpage", "positionregion", "positionbox", "positionx", "positiony", "positionanchor", "positioninregion", "positionatanchor", "getposboxes", "getmultipars", "getpospage", "getposparagraph", "getposcolumn", "getposregion", "getposx", "getposy", "getposwidth", "getposheight", "getposdepth", "getposleftskip", "getposrightskip", "getposhsize", "getposparindent", "getposhangindent", "getposhangafter", "getposxy", "getposupperleft", "getposlowerleft", "getposupperright", "getposlowerright", "getposllx", "getposlly", "getposurx", "getposury", "wdpart", "htpart", "dppart", "texvar", "texstr", "inpath", "pointof", "leftof", "rightof", "utfnum", "utflen", "utfsub", "newhash", "disposehash", "inhash", "tohash", "fromhash", "isarray", "prefix", "isobject", "comment", "report", "lua", "lualist", "mp", "MP", "luacall", "mirrored", "mirroredabout", "scriptindex", "newscriptindex", "newcolor", "newrgbcolor", "newcmykcolor", "newnumeric", 
"newboolean", "newtransform", "newpath", "newpicture", "newstring", "newpair", "mpvard", "mpvarn", "mpvars", "mpvar", "withtolerance" },
- ["internals"]={ "nocolormodel", "greycolormodel", "graycolormodel", "rgbcolormodel", "cmykcolormodel", "shadefactor", "shadeoffset", "textextoffset", "textextanchor", "normaltransparent", "multiplytransparent", "screentransparent", "overlaytransparent", "softlighttransparent", "hardlighttransparent", "colordodgetransparent", "colorburntransparent", "darkentransparent", "lightentransparent", "differencetransparent", "exclusiontransparent", "huetransparent", "saturationtransparent", "colortransparent", "luminositytransparent", "ahvariant", "ahdimple", "ahfactor", "ahscale", "metapostversion", "maxdimensions", "drawoptionsfactor", "dq", "sq", "crossingscale", "crossingoption", "contextlmtxmode", "metafunversion", "minifunversion", "getparameters", "presetparameters", "hasparameter", "hasoption", "getparameter", "getparameterdefault", "getparametercount", "getmaxparametercount", "getparameterpath", "getparameterpen", "getparametertext", "applyparameters", "pushparameters", "popparameters", "setluaparameter", "definecolor", "record", "newrecord", "setrecord", "getrecord", "anchorxy", "anchorx", "anchory", "anchorht", "anchordp", "anchorul", "anchorll", "anchorlr", "anchorur", "localanchorbox", "localanchorcell", "localanchorspan", "anchorbox", "anchorcell", "anchorspan", "matrixbox", "matrixcell", "matrixspan" },
-} \ No newline at end of file
diff --git a/context/data/textadept/context/data/scite-context-data-metapost.lua b/context/data/textadept/context/data/scite-context-data-metapost.lua
deleted file mode 100644
index cb376cc87..000000000
--- a/context/data/textadept/context/data/scite-context-data-metapost.lua
+++ /dev/null
@@ -1,9 +0,0 @@
-return {
- ["commands"]={ "on", "off", "interpath", "upto", "downto", "beginfig", "endfig", "beginglyph", "endglyph", "beginfont", "endfont", "rotatedaround", "reflectedabout", "arrowhead", "currentpen", "currentpicture", "cuttings", "defaultfont", "extra_beginfig", "extra_endfig", "down", "evenly", "fullcircle", "halfcircle", "identity", "in", "left", "pensquare", "penrazor", "penspec", "origin", "quartercircle", "right", "unitsquare", "up", "withdots", "abs", "bbox", "ceiling", "center", "cutafter", "cutbefore", "dir", "directionpoint", "div", "dotprod", "intersectionpoint", "inverse", "mod", "round", "unitvector", "whatever", "cutdraw", "draw", "drawarrow", "drawdblarrow", "fill", "filldraw", "drawdot", "loggingall", "interact", "tracingall", "tracingnone", "pickup", "undraw", "unfill", "unfilldraw", "buildcycle", "dashpattern", "decr", "dotlabel", "dotlabels", "drawoptions", "incr", "label", "labels", "max", "min", "thelabel", "z", "beginchar", "blacker", "capsule_end", "change_width", "define_blacker_pixels", "define_corrected_pixels", "define_good_x_pixels", "define_good_y_pixels", "define_horizontal_corrected_pixels", "define_pixels", "define_whole_blacker_pixels", "define_whole_pixels", "define_whole_vertical_blacker_pixels", "define_whole_vertical_pixels", "endchar", "extra_beginchar", "extra_endchar", "extra_setup", "font_coding_scheme", "clearxy", "clearit", "clearpen", "shipit", "font_extra_space", "exitunless", "relax", "hide", "gobble", "gobbled", "stop", "blankpicture", "counterclockwise", "tensepath", "takepower", "direction", "softjoin", "makelabel", "rotatedabout", "flex", "superellipse", "image", "nullpen", "savepen", "clearpen", "penpos", "penlabels", "range", "thru", "z", "laboff", "bye", "red", "green", "blue", "cyan", "magenta", "yellow", "black", "white", "background", "mm", "pt", "dd", "bp", "cm", "pc", "cc", "in", "dk", "triplet", "quadruplet", "totransform", "bymatrix", "closedcurve", "closedlines", "primitive", "permanent", "immutable", 
"mutable", "frozen", "showproperty", "showhashentry" },
- ["disabled"]={ "verbatimtex", "troffmode" },
- ["internals"]={ "mitered", "rounded", "beveled", "butt", "squared", "eps", "epsilon", "infinity", "bboxmargin", "ahlength", "ahangle", "labeloffset", "dotlabeldiam", "defaultpen", "defaultscale", "join_radius", "charscale", "inicatcoderegime", "texcatcoderegime", "luacatcoderegime", "notcatcoderegime", "vrbcatcoderegime", "prtcatcoderegime", "ctxcatcoderegime", "txtcatcoderegime", "catcoderegime", "ditto", "EOF", "pen_lft", "pen_rt", "pen_top", "pen_bot" },
- ["metafont"]={ "autorounding", "beginchar", "blacker", "boundarychar", "capsule_def", "capsule_end", "change_width", "chardp", "chardx", "chardy", "charexists", "charext", "charht", "charic", "charlist", "charwd", "cull", "cullit", "currenttransform", "currentwindow", "define_blacker_pixels", "define_corrected_pixels", "define_good_x_pixels", "define_good_y_pixels", "define_horizontal_corrected_pixels", "define_pixels", "define_whole_blacker_pixels", "define_whole_pixels", "define_whole_vertical_blacker_pixels", "define_whole_vertical_pixels", "designsize", "display", "displaying", "endchar", "extensible", "extra_beginchar", "extra_endchar", "extra_setup", "fillin", "font_coding_scheme", "font_extra_space", "font_identifier", "font_normal_shrink", "font_normal_space", "font_normal_stretch", "font_quad", "font_size", "font_slant", "font_x_height", "fontdimen", "fontmaking", "gfcorners", "granularity", "grayfont", "headerbyte", "hppp", "hround", "imagerules", "italcorr", "kern", "labelfont", "ligtable", "lowres_fix", "makebox", "makegrid", "maketicks", "mode_def", "mode_setup", "nodisplays", "notransforms", "numspecial", "o_correction", "openit", "openwindow", "pixels_per_inch", "proofing", "proofoffset", "proofrule", "proofrulethickness", "rulepen", "screenchars", "screenrule", "screenstrokes", "screen_cols", "screen_rows", "showit", "slantfont", "smode", "smoothing", "titlefont", "totalweight", "tracingedges", "tracingpens", "turningcheck", "unitpixel", "vppp", "vround", "xoffset", "yoffset" },
- ["primitives"]={ "charcode", "day", "linecap", "linejoin", "miterlimit", "stacking", "month", "pausing", "prologues", "showstopping", "time", "tracingcapsules", "tracingchoices", "mpprocset", "tracingcommands", "tracingequations", "tracinglostchars", "tracingmacros", "tracingonline", "tracingoutput", "tracingrestores", "tracingspecs", "tracingstats", "tracingtitles", "truecorners", "warningcheck", "year", "false", "nullpicture", "pencircle", "penspec", "true", "and", "angle", "arclength", "arctime", "ASCII", "boolean", "bot", "char", "color", "cosd", "cycle", "decimal", "directiontime", "floor", "fontsize", "hex", "infont", "intersectiontimes", "known", "void", "length", "llcorner", "lrcorner", "makepath", "makepen", "mexp", "mlog", "normaldeviate", "not", "numeric", "oct", "odd", "or", "path", "pair", "pen", "penoffset", "picture", "point", "postcontrol", "precontrol", "reverse", "rotated", "scaled", "shifted", "sind", "slanted", "sqrt", "str", "string", "subpath", "substring", "transform", "transformed", "ulcorner", "uniformdeviate", "unknown", "urcorner", "xpart", "xscaled", "xxpart", "xypart", "ypart", "yscaled", "yxpart", "yypart", "zscaled", "addto", "clip", "input", "interim", "let", "newinternal", "save", "setbounds", "setgroup", "shipout", "show", "showdependencies", "showtoken", "showvariable", "special", "begingroup", "endgroup", "of", "curl", "tension", "and", "controls", "def", "vardef", "enddef", "expr", "suffix", "text", "primary", "secondary", "tertiary", "primarydef", "secondarydef", "tertiarydef", "randomseed", "also", "contour", "doublepath", "withcolor", "withcmykcolor", "withpen", "withstacking", "dashed", "envelope", "if", "else", "elseif", "fi", "for", "endfor", "forever", "exitif", "within", "forsuffixes", "step", "until", "charlist", "extensible", "fontdimen", "headerbyte", "kern", "ligtable", "boundarychar", "chardp", "charext", "charht", "charic", "charwd", "designsize", "fontmaking", "charexists", "cullit", "currenttransform", 
"gfcorners", "grayfont", "hround", "imagerules", "lowres_fix", "nodisplays", "notransforms", "openit", "displaying", "currentwindow", "screen_rows", "screen_cols", "pixels_per_inch", "cull", "display", "openwindow", "numspecial", "totalweight", "autorounding", "fillin", "proofing", "tracingpens", "xoffset", "chardx", "granularity", "smoothing", "turningcheck", "yoffset", "chardy", "hppp", "tracingedges", "vppp", "extra_beginfig", "extra_endfig", "mpxbreak", "endinput", "message", "delimiters", "turningnumber", "errmessage", "scantokens", "end", "outer", "inner", "write", "to", "readfrom", "closefrom", "withprescript", "withpostscript", "top", "bot", "lft", "rt", "ulft", "urt", "llft", "lrt", "redpart", "greenpart", "bluepart", "cyanpart", "magentapart", "yellowpart", "blackpart", "prescriptpart", "postscriptpart", "rgbcolor", "cmykcolor", "colormodel", "graypart", "greypart", "greycolor", "graycolor", "dashpart", "penpart", "stackingpart", "stroked", "filled", "textual", "clipped", "bounded", "pathpart", "expandafter", "minute", "hour", "outputformat", "outputtemplate", "filenametemplate", "fontmapfile", "fontmapline", "fontpart", "fontsize", "glyph", "restoreclipcolor", "troffmode", "runscript", "maketext", "numbersystem", "overloadmode", "setproperty" },
- ["shortcuts"]={ "..", "...", "--", "---", "&", "\\" },
- ["tex"]={ "btex", "etex", "verbatimtex" },
-} \ No newline at end of file
diff --git a/context/data/textadept/context/data/scite-context-data-tex.lua b/context/data/textadept/context/data/scite-context-data-tex.lua
deleted file mode 100644
index b02efe91e..000000000
--- a/context/data/textadept/context/data/scite-context-data-tex.lua
+++ /dev/null
@@ -1,9 +0,0 @@
-return {
- ["aleph"]={ "Alephminorversion", "Alephrevision", "Alephversion" },
- ["etex"]={ "botmarks", "clubpenalties", "currentgrouplevel", "currentgrouptype", "currentifbranch", "currentiflevel", "currentiftype", "detokenize", "dimexpr", "displaywidowpenalties", "everyeof", "firstmarks", "fontchardp", "fontcharht", "fontcharic", "fontcharwd", "glueexpr", "glueshrink", "glueshrinkorder", "gluestretch", "gluestretchorder", "gluetomu", "ifcsname", "ifdefined", "iffontchar", "interactionmode", "interlinepenalties", "lastlinefit", "lastnodetype", "marks", "muexpr", "mutoglue", "numexpr", "pagediscards", "parshapedimen", "parshapeindent", "parshapelength", "predisplaydirection", "protected", "savinghyphcodes", "savingvdiscards", "scantokens", "showgroups", "showifs", "showtokens", "splitbotmarks", "splitdiscards", "splitfirstmarks", "topmarks", "tracingassigns", "tracinggroups", "tracingifs", "tracinglevels", "tracingnesting", "unexpanded", "unless", "widowpenalties" },
- ["luatex"]={ "UUskewed", "UUskewedwithdelims", "Uabove", "Uabovewithdelims", "Uatop", "Uatopwithdelims", "Uchar", "Udelcode", "Udelcodenum", "Udelimiter", "Udelimiterover", "Udelimiterunder", "Uhextensible", "Uleft", "Umathaccent", "Umathaccentbaseheight", "Umathaccentvariant", "Umathadapttoleft", "Umathadapttoright", "Umathaxis", "Umathbinbinspacing", "Umathbinclosespacing", "Umathbininnerspacing", "Umathbinopenspacing", "Umathbinopspacing", "Umathbinordspacing", "Umathbinpunctspacing", "Umathbinrelspacing", "Umathbotaccentvariant", "Umathchar", "Umathcharclass", "Umathchardef", "Umathcharfam", "Umathcharnum", "Umathcharnumdef", "Umathcharslot", "Umathclass", "Umathclosebinspacing", "Umathcloseclosespacing", "Umathcloseinnerspacing", "Umathcloseopenspacing", "Umathcloseopspacing", "Umathcloseordspacing", "Umathclosepunctspacing", "Umathcloserelspacing", "Umathcode", "Umathcodenum", "Umathconnectoroverlapmin", "Umathdegreevariant", "Umathdelimiterovervariant", "Umathdelimiterundervariant", "Umathdenominatorvariant", "Umathextrasubpreshift", "Umathextrasubshift", "Umathextrasuppreshift", "Umathextrasupshift", "Umathfractiondelsize", "Umathfractiondenomdown", "Umathfractiondenomvgap", "Umathfractionnumup", "Umathfractionnumvgap", "Umathfractionrule", "Umathfractionvariant", "Umathhextensiblevariant", "Umathinnerbinspacing", "Umathinnerclosespacing", "Umathinnerinnerspacing", "Umathinneropenspacing", "Umathinneropspacing", "Umathinnerordspacing", "Umathinnerpunctspacing", "Umathinnerrelspacing", "Umathlimitabovebgap", "Umathlimitabovekern", "Umathlimitabovevgap", "Umathlimitbelowbgap", "Umathlimitbelowkern", "Umathlimitbelowvgap", "Umathlimits", "Umathnoaxis", "Umathnolimits", "Umathnolimitsubfactor", "Umathnolimitsupfactor", "Umathnumeratorvariant", "Umathopbinspacing", "Umathopclosespacing", "Umathopenbinspacing", "Umathopenclosespacing", "Umathopeninnerspacing", "Umathopenopenspacing", "Umathopenopspacing", "Umathopenordspacing", "Umathopenpunctspacing", 
"Umathopenrelspacing", "Umathopenupdepth", "Umathopenupheight", "Umathoperatorsize", "Umathopinnerspacing", "Umathopopenspacing", "Umathopopspacing", "Umathopordspacing", "Umathoppunctspacing", "Umathoprelspacing", "Umathordbinspacing", "Umathordclosespacing", "Umathordinnerspacing", "Umathordopenspacing", "Umathordopspacing", "Umathordordspacing", "Umathordpunctspacing", "Umathordrelspacing", "Umathoverbarkern", "Umathoverbarrule", "Umathoverbarvgap", "Umathoverdelimiterbgap", "Umathoverdelimitervariant", "Umathoverdelimitervgap", "Umathoverlayaccentvariant", "Umathoverlinevariant", "Umathphantom", "Umathpunctbinspacing", "Umathpunctclosespacing", "Umathpunctinnerspacing", "Umathpunctopenspacing", "Umathpunctopspacing", "Umathpunctordspacing", "Umathpunctpunctspacing", "Umathpunctrelspacing", "Umathquad", "Umathradicaldegreeafter", "Umathradicaldegreebefore", "Umathradicaldegreeraise", "Umathradicalkern", "Umathradicalrule", "Umathradicalvariant", "Umathradicalvgap", "Umathrelbinspacing", "Umathrelclosespacing", "Umathrelinnerspacing", "Umathrelopenspacing", "Umathrelopspacing", "Umathrelordspacing", "Umathrelpunctspacing", "Umathrelrelspacing", "Umathskewedfractionhgap", "Umathskewedfractionvgap", "Umathspaceafterscript", "Umathspacebeforescript", "Umathspacingmode", "Umathstackdenomdown", "Umathstacknumup", "Umathstackvariant", "Umathstackvgap", "Umathsubscriptvariant", "Umathsubshiftdown", "Umathsubshiftdrop", "Umathsubsupshiftdown", "Umathsubsupvgap", "Umathsubtopmax", "Umathsupbottommin", "Umathsuperscriptvariant", "Umathsupshiftdrop", "Umathsupshiftup", "Umathsupsubbottommax", "Umathtopaccentvariant", "Umathunderbarkern", "Umathunderbarrule", "Umathunderbarvgap", "Umathunderdelimiterbgap", "Umathunderdelimitervariant", "Umathunderdelimitervgap", "Umathunderlinevariant", "Umathvextensiblevariant", "Umathvoid", "Umiddle", "Unosubprescript", "Unosubscript", "Unosuperprescript", "Unosuperscript", "Uover", "Uoverdelimiter", "Uoverwithdelims", "Uradical", 
"Uright", "Uroot", "Uskewed", "Uskewedwithdelims", "Ustack", "Ustartdisplaymath", "Ustartmath", "Ustopdisplaymath", "Ustopmath", "Ustyle", "Usubprescript", "Usubscript", "Usuperprescript", "Usuperscript", "Uunderdelimiter", "Uvextensible", "adjustspacing", "adjustspacingshrink", "adjustspacingstep", "adjustspacingstretch", "afterassigned", "aftergrouped", "aliased", "alignmark", "aligntab", "atendofgroup", "atendofgrouped", "attribute", "attributedef", "automaticdiscretionary", "automatichyphenpenalty", "automigrationmode", "autoparagraphmode", "begincsname", "beginlocalcontrol", "boundary", "boxattribute", "boxdirection", "boxorientation", "boxtotal", "boxxmove", "boxxoffset", "boxymove", "boxyoffset", "catcodetable", "clearmarks", "crampeddisplaystyle", "crampedscriptscriptstyle", "crampedscriptstyle", "crampedtextstyle", "csstring", "currentmarks", "defcsname", "dimensiondef", "dimexpression", "directlua", "edefcsname", "efcode", "endlocalcontrol", "enforced", "etoksapp", "etokspre", "everybeforepar", "everytab", "exceptionpenalty", "expand", "expandafterpars", "expandafterspaces", "expandcstoken", "expanded", "expandtoken", "explicitdiscretionary", "explicithyphenpenalty", "firstvalidlanguage", "flushmarks", "fontid", "fontmathcontrol", "fontspecifiedsize", "fonttextcontrol", "formatname", "frozen", "futurecsname", "futuredef", "futureexpand", "futureexpandis", "futureexpandisap", "gdefcsname", "gleaders", "glet", "gletcsname", "glettonothing", "gluespecdef", "glyphdatafield", "glyphoptions", "glyphscale", "glyphscriptfield", "glyphscriptscale", "glyphscriptscriptscale", "glyphstatefield", "glyphtextscale", "glyphxoffset", "glyphxscale", "glyphyoffset", "glyphyscale", "gtoksapp", "gtokspre", "hccode", "hjcode", "hpack", "hyphenationmin", "hyphenationmode", "ifabsdim", "ifabsnum", "ifarguments", "ifboolean", "ifchkdim", "ifchknum", "ifcmpdim", "ifcmpnum", "ifcondition", "ifcstok", "ifdimexpression", "ifdimval", "ifempty", "ifflags", "ifhaschar", "ifhastok", 
"ifhastoks", "ifhasxtoks", "ifincsname", "ifinsert", "ifmathparameter", "ifmathstyle", "ifnumexpression", "ifnumval", "ifparameter", "ifparameters", "ifrelax", "iftok", "ignorearguments", "ignorepars", "immediate", "immutable", "initcatcodetable", "insertbox", "insertcopy", "insertdepth", "insertdistance", "insertheight", "insertheights", "insertlimit", "insertmaxdepth", "insertmode", "insertmultiplier", "insertpenalty", "insertprogress", "insertstorage", "insertstoring", "insertunbox", "insertuncopy", "insertwidth", "instance", "integerdef", "lastarguments", "lastchkdim", "lastchknum", "lastnamedcs", "lastnodesubtype", "lastparcontext", "leftmarginkern", "letcharcode", "letcsname", "letfrozen", "letprotected", "lettonothing", "linedirection", "localbrokenpenalty", "localcontrol", "localcontrolled", "localinterlinepenalty", "localleftbox", "localrightbox", "lpcode", "luabytecode", "luabytecodecall", "luacopyinputnodes", "luadef", "luaescapestring", "luafunction", "luafunctioncall", "luatexbanner", "luatexrevision", "luatexversion", "mathcontrolmode", "mathdelimitersmode", "mathdirection", "mathdisplayskipmode", "matheqnogapstep", "mathflattenmode", "mathfontcontrol", "mathitalicsmode", "mathnolimitsmode", "mathpenaltiesmode", "mathrulesfam", "mathrulesmode", "mathrulethicknessmode", "mathscale", "mathscriptboxmode", "mathscriptcharmode", "mathscriptsmode", "mathstyle", "mathsurroundmode", "mathsurroundskip", "mugluespecdef", "mutable", "noaligned", "noboundary", "nohrule", "norelax", "normalizelinemode", "nospaces", "novrule", "numericscale", "numexpression", "orelse", "orunless", "outputbox", "overloaded", "overloadmode", "pageboundary", "pagevsize", "parametercount", "parametermark", "parattribute", "pardirection", "permanent", "postexhyphenchar", "posthyphenchar", "prebinoppenalty", "predisplaygapfactor", "preexhyphenchar", "prehyphenchar", "prerelpenalty", "protrudechars", "protrusionboundary", "pxdimen", "quitvmode", "retokenized", "rightmarginkern", "rpcode", 
"savecatcodetable", "scantextokens", "semiexpanded", "semiprotected", "setfontid", "snapshotpar", "supmarkmode", "swapcsvalues", "tabsize", "textdirection", "thewithoutunit", "tokenized", "toksapp", "tokspre", "tolerant", "tpack", "tracingalignments", "tracingexpressions", "tracingfonts", "tracinghyphenation", "tracinginserts", "tracingmarks", "tracingmath", "undent", "unletfrozen", "unletprotected", "untraced", "vpack", "wordboundary", "wrapuppar", "xdefcsname", "xtoksapp", "xtokspre" },
- ["omega"]={ "Omegaminorversion", "Omegarevision", "Omegaversion" },
- ["pdftex"]={ "ifpdfabsdim", "ifpdfabsnum", "ifpdfprimitive", "pdfadjustspacing", "pdfannot", "pdfcatalog", "pdfcolorstack", "pdfcolorstackinit", "pdfcompresslevel", "pdfcopyfont", "pdfcreationdate", "pdfdecimaldigits", "pdfdest", "pdfdestmargin", "pdfdraftmode", "pdfeachlinedepth", "pdfeachlineheight", "pdfendlink", "pdfendthread", "pdffirstlineheight", "pdffontattr", "pdffontexpand", "pdffontname", "pdffontobjnum", "pdffontsize", "pdfgamma", "pdfgentounicode", "pdfglyphtounicode", "pdfhorigin", "pdfignoreddimen", "pdfignoreunknownimages", "pdfimageaddfilename", "pdfimageapplygamma", "pdfimagegamma", "pdfimagehicolor", "pdfimageresolution", "pdfincludechars", "pdfinclusioncopyfonts", "pdfinclusionerrorlevel", "pdfinfo", "pdfinfoomitdate", "pdfinsertht", "pdflastannot", "pdflastlinedepth", "pdflastlink", "pdflastobj", "pdflastxform", "pdflastximage", "pdflastximagepages", "pdflastxpos", "pdflastypos", "pdflinkmargin", "pdfliteral", "pdfmajorversion", "pdfmapfile", "pdfmapline", "pdfminorversion", "pdfnames", "pdfnoligatures", "pdfnormaldeviate", "pdfobj", "pdfobjcompresslevel", "pdfomitcharset", "pdfomitcidset", "pdfoutline", "pdfoutput", "pdfpageattr", "pdfpagebox", "pdfpageheight", "pdfpageref", "pdfpageresources", "pdfpagesattr", "pdfpagewidth", "pdfpkfixeddpi", "pdfpkmode", "pdfpkresolution", "pdfprimitive", "pdfprotrudechars", "pdfpxdimen", "pdfrandomseed", "pdfrecompress", "pdfrefobj", "pdfrefxform", "pdfrefximage", "pdfreplacefont", "pdfrestore", "pdfretval", "pdfsave", "pdfsavepos", "pdfsetmatrix", "pdfsetrandomseed", "pdfstartlink", "pdfstartthread", "pdfsuppressoptionalinfo", "pdfsuppressptexinfo", "pdftexbanner", "pdftexrevision", "pdftexversion", "pdfthread", "pdfthreadmargin", "pdftracingfonts", "pdftrailer", "pdftrailerid", "pdfuniformdeviate", "pdfuniqueresname", "pdfvorigin", "pdfxform", "pdfxformattr", "pdfxformmargin", "pdfxformname", "pdfxformresources", "pdfximage" },
- ["tex"]={ " ", "-", "/", "above", "abovedisplayshortskip", "abovedisplayskip", "abovewithdelims", "accent", "adjdemerits", "advance", "afterassignment", "aftergroup", "aligncontent", "atop", "atopwithdelims", "badness", "baselineskip", "batchmode", "begingroup", "beginsimplegroup", "belowdisplayshortskip", "belowdisplayskip", "binoppenalty", "botmark", "box", "boxmaxdepth", "brokenpenalty", "catcode", "char", "chardef", "cleaders", "clubpenalty", "copy", "count", "countdef", "cr", "crcr", "csname", "day", "deadcycles", "def", "defaulthyphenchar", "defaultskewchar", "delcode", "delimiter", "delimiterfactor", "delimitershortfall", "dimen", "dimendef", "discretionary", "displayindent", "displaylimits", "displaystyle", "displaywidowpenalty", "displaywidth", "divide", "doublehyphendemerits", "dp", "dump", "edef", "else", "emergencystretch", "end", "endcsname", "endgroup", "endinput", "endlinechar", "endsimplegroup", "eqno", "errhelp", "errmessage", "errorcontextlines", "errorstopmode", "escapechar", "everycr", "everydisplay", "everyhbox", "everyjob", "everymath", "everypar", "everyvbox", "exhyphenchar", "exhyphenpenalty", "expandafter", "fam", "fi", "finalhyphendemerits", "firstmark", "floatingpenalty", "font", "fontdimen", "fontname", "fontspecifiedname", "futurelet", "gdef", "global", "globaldefs", "glyph", "halign", "hangafter", "hangindent", "hbadness", "hbox", "hfil", "hfill", "hfilneg", "hfuzz", "holdinginserts", "hrule", "hsize", "hskip", "hss", "ht", "hyphenation", "hyphenchar", "hyphenpenalty", "if", "ifcase", "ifcat", "ifdim", "iffalse", "ifhbox", "ifhmode", "ifinner", "ifmmode", "ifnum", "ifodd", "iftrue", "ifvbox", "ifvmode", "ifvoid", "ifx", "ignorespaces", "indent", "input", "inputlineno", "insert", "insertpenalties", "interlinepenalty", "jobname", "kern", "language", "lastbox", "lastkern", "lastpenalty", "lastskip", "lccode", "leaders", "left", "lefthyphenmin", "leftskip", "leqno", "let", "limits", "linepenalty", "lineskip", "lineskiplimit", "long", 
"looseness", "lower", "lowercase", "mark", "mathaccent", "mathbin", "mathchar", "mathchardef", "mathchoice", "mathclose", "mathcode", "mathinner", "mathop", "mathopen", "mathord", "mathpunct", "mathrel", "mathsurround", "maxdeadcycles", "maxdepth", "meaning", "meaningfull", "meaningless", "medmuskip", "message", "middle", "mkern", "month", "moveleft", "moveright", "mskip", "multiply", "muskip", "muskipdef", "newlinechar", "noalign", "noexpand", "noindent", "nolimits", "nonscript", "nonstopmode", "nulldelimiterspace", "nullfont", "number", "omit", "or", "outer", "output", "outputpenalty", "over", "overfullrule", "overline", "overshoot", "overwithdelims", "pagedepth", "pagefilllstretch", "pagefillstretch", "pagefilstretch", "pagegoal", "pageshrink", "pagestretch", "pagetotal", "par", "parfillleftskip", "parfillskip", "parindent", "parshape", "parskip", "patterns", "pausing", "penalty", "postdisplaypenalty", "predisplaypenalty", "predisplaysize", "pretolerance", "prevdepth", "prevgraf", "radical", "raise", "relax", "relpenalty", "right", "righthyphenmin", "rightskip", "romannumeral", "scaledfontdimen", "scriptfont", "scriptscriptfont", "scriptscriptstyle", "scriptspace", "scriptstyle", "scrollmode", "setbox", "setlanguage", "sfcode", "shipout", "show", "showbox", "showboxbreadth", "showboxdepth", "showlists", "shownodedetails", "showthe", "skewchar", "skip", "skipdef", "spacefactor", "spaceskip", "span", "splitbotmark", "splitfirstmark", "splitmaxdepth", "splittopskip", "string", "tabskip", "textfont", "textstyle", "the", "thickmuskip", "thinmuskip", "time", "todimension", "tointeger", "toks", "toksdef", "tolerance", "topmark", "topskip", "toscaled", "tracingcommands", "tracinglostchars", "tracingmacros", "tracingonline", "tracingoutput", "tracingpages", "tracingparagraphs", "tracingrestores", "tracingstats", "uccode", "uchyph", "underline", "unhbox", "unhcopy", "unhpack", "unkern", "unpenalty", "unskip", "unvbox", "unvcopy", "unvpack", "uppercase", "vadjust", 
"valign", "vbadness", "vbox", "vcenter", "vfil", "vfill", "vfilneg", "vfuzz", "vrule", "vsize", "vskip", "vsplit", "vss", "vtop", "wd", "widowpenalty", "xdef", "xleaders", "xspaceskip", "year" },
- ["xetex"]={ "XeTeXversion" },
-} \ No newline at end of file
diff --git a/context/data/textadept/context/init.lua b/context/data/textadept/context/init.lua
deleted file mode 100644
index d5da25fc8..000000000
--- a/context/data/textadept/context/init.lua
+++ /dev/null
@@ -1,147 +0,0 @@
-local info = {
- version = 1.002,
- comment = "ini for textadept for context/metafun",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- Note for myself: I need to check the latest greatest textadept and also see if
--- the lpeg lexer interface has been ported to the latest scite. If not I need to
--- come up with a backup plan (vscode?).
-
-if not textadept then
- return
-end
-
--- The textadept documentation says that there can be a lexers directory under a user
--- directory but it's not in the package path. The next involved a bit or trial and
--- error in order to avoid crashes so I suppose it can be done better. If I use
--- textadept alongside scite I will make a different key binding. The code below is
--- a bit of a mess, which is a side effect of stepwise adaption combined with shared
--- iuse of code.
---
--- We use the commandline switch -u to point to the location where this file is located
--- as we then can keep it outside the program area. We also put some other files under
--- themes.
---
--- A problem is that scite needs the lexer.lua file while for textadept we don't want
--- to touch that one. So we end up with duplicate files. We cannot configure scite to
--- use an explicit lexer so both lexer paths have the same files except that the textadept
--- one has no lexer.lua there. Unfortunately themes is not requires's but always looked
--- up with an explicit path. (Maybe I should patch that.)
---
--- We are in one of:
---
--- tex/texmf-context/context/data/textadept/context
--- data/develop/context/scite/data/context/textadept
-
-package.path = table.concat ( {
- --
- _USERHOME .. "/?.lua",
- --
- _USERHOME .. "/lexers/?.lua",
- _USERHOME .. "/modules/?.lua",
- _USERHOME .. "/themes/?.lua",
- _USERHOME .. "/data/?.lua",
- --
- package.path
- --
-}, ';')
-
--- We now reset the session location to a writeable user area. We also take the opportunity
--- to increase the list.
-
-local sessionpath = os.getenv(not WIN32 and 'HOME' or 'USERPROFILE') .. '/.textadept'
-local sessionfile = not CURSES and 'session' or 'session_term'
-
-textadept.session.default_session = sessionpath .. "/" .. sessionfile
-textadept.session.save_on_quit = true
-textadept.session.max_recent_files = 25
-
--- Let's load our adapted lexer framework.
-
-require("scite-context-lexer")
-require("textadept-context-runner")
-require("textadept-context-files")
-require("scite-context-theme")
-require("textadept-context-settings")
-require("textadept-context-types")
-
--- This prevents other themes to spoil our settings.
-
--- ui.set_theme("scite-context-theme")
-buffer:set_theme("scite-context-theme")
-
--- Since version 10 there is some settings stuff in the main init file but that
--- crashes on load_settings. It has to do with the replacement of properties
--- but we already had that replaced for a while. There is some blob made that
--- gets loaded but it's not robust (should be done different I think). Anyway,
--- intercepting the two event handlers is easiest. Maybe some day I will
--- replace that init anyway (if these fundamentals keep changing between
--- versions.)
---
--- I admit that it's not a beautiful solution but it works ok and I already
--- spent too much time figuring things out anyway.
-
-local events_connect = events.connect
-
-local function events_newbuffer()
- local buffer = _G.buffer
- local SETDIRECTFUNCTION = _SCINTILLA.properties.direct_function[1]
- local SETDIRECTPOINTER = _SCINTILLA.properties.doc_pointer[2]
- local SETLUASTATE = _SCINTILLA.functions.change_lexer_state[1]
- local SETLEXERLANGUAGE = _SCINTILLA.properties.lexer_language[2]
- buffer.lexer_language = 'lpeg'
- buffer:private_lexer_call(SETDIRECTFUNCTION, buffer.direct_function)
- buffer:private_lexer_call(SETDIRECTPOINTER, buffer.direct_pointer)
- buffer:private_lexer_call(SETLUASTATE, _LUA)
- buffer.property['lexer.lpeg.home'] = _USERHOME..'/lexers/?.lua;'.. _HOME..'/lexers'
- -- load_settings()
- buffer:private_lexer_call(SETLEXERLANGUAGE, 'text')
- if buffer == ui.command_entry then
- buffer.caret_line_visible = false
- end
-end
-
--- Why these resets:
-
-local ctrl_keys = {
- '[', ']', '/', '\\', 'Z', 'Y', 'X', 'C', 'V', 'A', 'L', 'T', 'D', 'U'
-}
-
-local ctrl_shift_keys = {
- 'L', 'T', 'U', 'Z'
-}
-
-local function events_newview()
- local buffer = _G.buffer
- for i=1, #ctrl_keys do
- buffer:clear_cmd_key(string.byte(ctrl_keys[i]) | buffer.MOD_CTRL << 16)
- end
- for i=1, #ctrl_shift_keys do
- buffer:clear_cmd_key(string.byte(ctrl_shift_keys[i]) | (buffer.MOD_CTRL | buffer.MOD_SHIFT) << 16)
- end
- if #_VIEWS > 1 then
- -- load_settings()
- local SETLEXERLANGUAGE = _SCINTILLA.properties.lexer_language[2]
- buffer:private_lexer_call(SETLEXERLANGUAGE, buffer._lexer or 'text')
- end
-end
-
-events.connect = function(where,what,location)
- if location == 1 then
- if where == events.BUFFER_NEW then
- return events_connect(where,events_newbuffer,location)
- elseif where == events.VIEW_NEW then
- return events_connect(where,events_newview,location)
- end
- end
- return events_connect(where,what,location)
-end
-
-local savedrequire = require
-
-require = function(name,...)
- return savedrequire(name == "lexer" and "scite-context-lexer" or name,...)
-end
diff --git a/context/data/textadept/context/lexers/lexer.lua b/context/data/textadept/context/lexers/lexer.lua
deleted file mode 100644
index 289697b72..000000000
--- a/context/data/textadept/context/lexers/lexer.lua
+++ /dev/null
@@ -1,2686 +0,0 @@
-local info = {
- version = 1.400,
- comment = "basics for scintilla lpeg lexer for context/metafun",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
- comment = "contains copyrighted code from mitchell.att.foicica.com",
-
-}
-
--- We need a copy of this file to lexer.lua in the same path. This was not needed
--- before version 10 but I can't figure out what else to do. It looks like there
--- is some loading of lexer.lua but I can't see where.
-
--- For a while it looked like we're stuck with scite 3 because there would be no
--- update of scintillua for the newer versions (c++ changes) but now it looks that
--- there will be updates (2021). There is a dll for scite >= 5 but it doesn't
--- work (yet). In version 5.20+ the scintillua dll makes scite crash (alsl when I
--- use the recommended import). In an early 5.02 loading the (shipped) lpeg lexer
--- does nothing at all. There have been changes in the lua interface too but I need
--- to compare the old and new lib. For now I gave up and got back to version 3+. It
--- would be nice if error messages would go to the log pane so that wget an idea
--- what happens. After all the code involved (below) is not that much and not that
--- complex either.
---
--- Actually, scite 5.22 also crashed when a program was launched so better wait
--- for a while. (In the worst case, when it all stops working, we need to migrate
--- to visual code, which is out backup/fallback plan.) I didn't test if the latest
--- textadept still works with our lexer variant. In the meantime that editor has
--- grown to some 30 MB so it is no longer a lightweight option (scite with scintilla
--- is still quite small).
-
-if lpeg.setmaxstack then lpeg.setmaxstack(1000) end
-
-local log = false
-local trace = false
-local detail = false
-local show = false -- nice for tracing (also for later)
-local collapse = false -- can save some 15% (maybe easier on scintilla)
-local inspect = false -- can save some 15% (maybe easier on scintilla)
-
--- local log = true
--- local trace = true
-
--- GET GOING
---
--- You need to copy this file over lexer.lua. In principle other lexers could work
--- too but not now. Maybe some day. All patterns will move into the patterns name
--- space. I might do the same with styles. If you run an older version of SciTE you
--- can take one of the archives. Pre 3.41 versions can just be copied to the right
--- path, as there we still use part of the normal lexer. Below we mention some
--- issues with different versions of SciTE. We try to keep up with changes but best
--- check careful if the version that yuou install works as expected because SciTE
--- and the scintillua dll need to be in sync.
---
--- REMARK
---
--- We started using lpeg lexing as soon as it came available. Because we had rather
--- demanding files and also wanted to use nested lexers, we ended up with our own
--- variant. At least at that time this was more robust and also much faster (as we
--- have some pretty large Lua data files and also work with large xml files). As a
--- consequence successive versions had to be adapted to changes in the (at that time
--- still unstable) api. In addition to lexing we also have spell checking and such.
--- Around version 3.60 things became more stable so I don't expect to change much.
---
--- LEXING
---
--- When pc's showed up we wrote our own editor (texedit) in MODULA 2. It was fast,
--- had multiple overlapping (text) windows, could run in the at most 1M memory at
--- that time, etc. The realtime file browsing with lexing that we had at that time
--- is still on my current wish list. The color scheme and logic that we used related
--- to the logic behind the ConTeXt user interface that evolved.
---
--- Later I rewrote the editor in perl/tk. I don't like the perl syntax but tk
--- widgets are very powerful and hard to beat. In fact, TextAdept reminds me of
--- that: wrap your own interface around a framework (tk had an edit control that one
--- could control completely not that different from scintilla). Last time I checked
--- it still ran fine so I might try to implement something like its file handling in
--- TextAdept.
---
--- In the end I settled for SciTE for which I wrote TeX and MetaPost lexers that
--- could handle keyword sets. With respect to lexing (syntax highlighting) ConTeXt
--- has a long history, if only because we need it for manuals. Anyway, in the end we
--- arrived at lpeg based lexing (which is quite natural as we have lots of lpeg
--- usage in ConTeXt). The basic color schemes haven't changed much. The most
--- prominent differences are the nested lexers.
---
--- In the meantime I made the lexer suitable for typesetting sources which was no
--- big deal as we already had that in place (ConTeXt used lpeg from the day it
--- showed up so we have several lexing options there too).
---
--- Keep in mind that in ConTeXt (typesetting) lexing can follow several approaches:
--- line based (which is handy for verbatim mode), syntax mode (which is nice for
--- tutorials), and tolerant mode (so that one can also show bad examples or errors).
--- These demands can clash.
---
--- HISTORY
---
--- The remarks below are more for myself so that I keep track of changes in the
--- way we adapt to the changes in the scintillua and scite.
---
--- The fold and lex functions are copied and patched from original code by Mitchell
--- (see lexer.lua) in the scintillua distribution. So whatever I say below, assume
--- that all errors are mine. The ability to use lpeg in scintilla is a real nice
--- addition and a brilliant move. The code is a byproduct of the (mainly Lua based)
--- TextAdept which at the time I ran into it was a rapidly moving target so I
--- decided to stick ot SciTE. When I played with it, it had no realtime output pane
--- although that seems to be dealt with now (2017). I need to have a look at it in
--- more detail but a first test again made the output hang and it was a bit slow too
--- (and I also want the log pane as SciTE has it, on the right, in view). So, for
--- now I stick to SciTE even when it's somewhat crippled by the fact that we cannot
--- hook our own (language dependent) lexer into the output pane (somehow the
--- errorlist lexer is hard coded into the editor). Hopefully that will change some
--- day. The ConTeXt distribution has cmd runner for textdept that will plug in the
--- lexers discussed here as well as a dedicated runner. Considere it an experiment.
---
--- The basic code hasn't changed much but we had to adapt a few times to changes in
--- the api and/or work around bugs. Starting with SciTE version 3.20 there was an
--- issue with coloring. We still lacked a connection with SciTE itself (properties
--- as well as printing to the log pane) and we could not trace this (on windows).
--- However on unix we can see messages! As far as I can see, there are no
--- fundamental changes in lexer.lua or LexLPeg.cxx so it must be/have been in
--- Scintilla itself. So we went back to 3.10. Indicators of issues are: no lexing of
--- 'next' and 'goto <label>' in the Lua lexer and no brace highlighting either.
--- Interesting is that it does work ok in the cld lexer (so the Lua code is okay).
--- All seems to be ok again in later versions, so, when you update best check first
--- and just switch back to an older version as normally a SciTE update is not
--- critital. When char-def.lua lexes real fast this is a signal that the lexer quits
--- somewhere halfway. Maybe there are some hard coded limitations on the amount of
--- styles and/or length of names.
---
--- Anyway, after checking 3.24 and adapting to the new lexer tables things are okay
--- again. So, this version assumes 3.24 or higher. In 3.24 we have a different token
--- result, i.e. no longer a { tag, pattern } but just two return values. I didn't
--- check other changes but will do that when I run into issues. I had already
--- optimized these small tables by hashing which was much more efficient (and maybe
--- even more efficient than the current approach) but this is no longer needed. For
--- the moment we keep some of that code around as I don't know what happens in
--- future versions. I'm anyway still happy with this kind of lexing.
---
--- In 3.31 another major change took place: some helper constants (maybe they're no
--- longer constants) and functions were moved into the lexer modules namespace but
--- the functions are assigned to the Lua module afterward so we cannot alias them
--- beforehand. We're probably getting close to a stable interface now. At that time
--- for the first time I considered making a whole copy and patch the other functions
--- too as we need an extra nesting model. However, I don't want to maintain too
--- much. An unfortunate change in 3.03 is that no longer a script can be specified.
--- This means that instead of loading the extensions via the properties file, we now
--- need to load them in our own lexers, unless of course we replace lexer.lua
--- completely (which adds another installation issue).
---
--- Another change has been that _LEXERHOME is no longer available. It looks like
--- more and more functionality gets dropped so maybe at some point we need to ship
--- our own dll/so files. For instance, I'd like to have access to the current
--- filename and other SciTE properties. We could then cache some info with each
--- file, if only we had knowledge of what file we're dealing with. This all makes a
--- nice installation more complex and (worse) makes it hard to share files between
--- different editors usign s similar directory structure.
---
--- For huge files folding can be pretty slow and I do have some large ones that I
--- keep open all the time. Loading is normally no ussue, unless one has remembered
--- the status and the cursor is at the last line of a 200K line file. Optimizing the
--- fold function brought down loading of char-def.lua from 14 sec => 8 sec.
--- Replacing the word_match function and optimizing the lex function gained another
--- 2+ seconds. A 6 second load is quite ok for me. The changed lexer table structure
--- (no subtables) brings loading down to a few seconds.
---
--- When the lexer path is copied to the TextAdept lexer path, and the theme
--- definition to theme path (as lexer.lua), the lexer works there as well. Although
--- ... when I decided to check the state of TextAdept I had to adapt some loader
--- code. The solution is not pretty but works and also permits overloading. When I
--- have time and motive I will make a proper setup file to tune the look and feel a
--- bit more than we do now. The TextAdept editor nwo has tabs and a console so it
--- has become more useable for me (it's still somewhat slower than SciTE).
--- Interesting is that the jit version of TextAdept crashes on lexing large files
--- (and does not feel faster either; maybe a side effect of known limitations as we
--- know that Luajit is more limited than stock Lua).
---
--- Function load(lexer_name) starts with _lexers.WHITESPACE = lexer_name ..
--- '_whitespace' which means that we need to have it frozen at the moment we load
--- another lexer. Because spacing is used to revert to a parent lexer we need to
--- make sure that we load children as late as possible in order not to get the wrong
--- whitespace trigger. This took me quite a while to figure out (not being that
--- familiar with the internals). The lex and fold functions have been optimized. It
--- is a pitty that there is no proper print available. Another thing needed is a
--- default style in our own theme style definition, as otherwise we get wrong nested
--- lexers, especially if they are larger than a view. This is the hardest part of
--- getting things right.
---
--- It's a pitty that there is no scintillua library for the OSX version of SciTE.
--- Even better would be to have the scintillua library as integral part of SciTE as
--- that way I could use OSX alongside windows and linux (depending on needs). Also
--- nice would be to have a proper interface to SciTE then because currently the
--- lexer is rather isolated and the Lua version does not provide all standard
--- libraries. It would also be good to have lpeg support in the regular SciTE Lua
--- extension (currently you need to pick it up from someplace else). I keep hoping.
---
--- With 3.41 the interface changed again so it became time to look into the C++ code
--- and consider compiling and patching myself, something that I like to avoid.
--- Loading is more complicated now as the lexer gets loaded automatically so we have
--- little control over extending the code now. After a few days trying all kind of
--- solutions I decided to follow a different approach: drop in a complete
--- replacement. This of course means that I need to keep track of even more changes
--- (which for sure will happen) but at least I get rid of interferences. Till 3.60
--- the api (lexing and configuration) was simply too unstable across versions which
--- is a pitty because we expect authors to install SciTE without hassle. Maybe in a
--- few years things will have stabelized. Maybe it's also not really expected that
--- one writes lexers at all. A side effect is that I now no longer will use shipped
--- lexers for languages that I made no lexer for, but just the built-in ones in
--- addition to the ConTeXt lpeg lexers. Not that it matters much as the ConTeXt
--- lexers cover what I need (and I can always write more). For editing TeX files one
--- only needs a limited set of lexers (TeX, MetaPost, Lua, BibTeX, C/W, PDF, SQL,
--- etc). I can add more when I want.
---
--- In fact, the transition to 3.41 was triggered by an unfateful update of Ubuntu
--- which left me with an incompatible SciTE and lexer library and updating was not
--- possible due to the lack of 64 bit libraries. We'll see what the future brings.
--- For now I can use SciTE under wine on linux. The fact that scintillua ships
--- independently is a showstopper.
---
--- Promissing is that the library now can use another Lua instance so maybe some day
--- it will get properly in SciTE and we can use more clever scripting.
---
--- In some lexers we use embedded ones even if we could do it directly, The reason
--- is that when the end token is edited (e.g. -->), backtracking to the space before
--- the begin token (e.g. <!--) results in applying the surrounding whitespace which
--- in turn means that when the end token is edited right, backtracking doesn't go
--- back. One solution (in the dll) would be to backtrack several space categories.
--- After all, lexing is quite fast (applying the result is much slower).
---
--- For some reason the first blob of text tends to go wrong (pdf and web). It would
--- be nice to have 'whole doc' initial lexing. Quite fishy as it makes it impossible
--- to lex the first part well (for already opened documents) because only a partial
--- text is passed.
---
--- So, maybe I should just write this from scratch (assuming more generic usage)
--- because after all, the dll expects just tables, based on a string. I can then
--- also do some more aggressive resource sharing (needed when used generic).
---
--- I think that nested lexers are still bugged (esp over longer ranges). It never
--- was robust or maybe it's simply not meant for too complex cases (well, it
--- probably *is* tricky material). The 3.24 version was probably the best so far.
--- The fact that styles bleed between lexers even if their states are isolated is an
--- issue. Another issus is that zero characters in the text passed to the lexer can
--- mess things up (pdf files have them in streams).
---
--- For more complex 'languages', like web or xml, we need to make sure that we use
--- e.g. 'default' for spacing that makes up some construct. Ok, we then still have a
--- backtracking issue but less.
---
--- Good news for some ConTeXt users: there is now a scintillua plugin for notepad++
--- and we ship an ini file for that editor with some installation instructions
--- embedded. Also, TextAdept has a console so that we can run realtime. The spawner
--- is still not perfect (sometimes hangs) but it was enough reason to spend time on
--- making our lexer work with TextAdept and create a setup.
---
--- Some bad news. The interface changed (again) in textadept 10, some for the better
--- (but a bit different from what happens here) and some for the worse, especially
--- moving some code to the init file so we now need some bad hacks. I decided to
--- stay with the old method of defining lexers and because the lexer cannot be run
--- in parallel any more (some change in the binary?) I will probably also cleanup
--- code below as we no longer need to be compatible. Unfortunately textadept is too
--- much a moving target to simply kick in some (tex related) production flow (apart
--- from the fact that it doesn't yet have the scite like realtime console). I'll
--- keep an eye on it. Because we don't need many added features I might as well decide
--- to make a lean and mean instance (after all the license permits forking).
-
--- TRACING
---
--- The advantage is that we now can check more easily with regular Lua(TeX). We can
--- also use wine and print to the console (somehow stdout is intercepted there.) So,
--- I've added a bit of tracing. Interesting is to notice that each document gets its
--- own instance which has advantages but also means that when we are spellchecking
--- we reload the word lists each time. (In the past I assumed a shared instance and
--- took some precautions. But I can fix this.)
---
--- TODO
---
--- It would be nice if we could load some ConTeXt Lua modules (the basic set) and
--- then use resolvers and such. But it might not work well with scite.
---
--- The current lexer basics are still a mix between old and new. Maybe I should redo
--- some more. This is probably easier in TextAdept than in SciTE.
---
--- We have to make sure we don't overload ConTeXt definitions when this code is used
--- in ConTeXt. I still have to add some of the goodies that we have there in lexers
--- into these.
---
--- Maybe I should use a special stripped on the one hand and extended version of the
--- dll (stable api) and at least add a bit more interfacing to scintilla.
---
--- I need to investigate if we can use the already built in Lua instance so that we
--- can combine the power of lexing with extensions.
---
--- I need to play with hotspot and other properties like indicators (whatever they
--- are).
---
--- I want to get rid of these lexers.STYLE_XX and lexers.XX things. This is possible
--- when we give up compatibility. Generalize the helpers that I wrote for SciTE so
--- that they also can be used TextAdept.
---
--- I can make an export to ConTeXt, but first I'll redo the code that makes the
--- grammar, as we only seem to need
---
--- lexer._TOKENSTYLES : table
--- lexer._CHILDREN : flag
--- lexer._EXTRASTYLES : table
--- lexer._GRAMMAR : flag
---
--- lexers.load : function
--- lexers.lex : function
---
--- So, if we drop compatibility with other lex definitions, we can make things
--- simpler. However, in the meantime one can just do this:
---
--- context --extra=listing --scite [--compact --verycompact] somefile.tex
---
--- and get a printable document. So, this todo is a bit obsolete.
---
--- Properties is an ugly mess ... due to chages in the interface we're now left
--- with some hybrid that sort of works ok
-
--- textadept: buffer:colourise(0,-1)
-
-local lpeg = require("lpeg")
-
-local global = _G
-local find, gmatch, match, lower, upper, gsub, sub, format, byte = string.find, string.gmatch, string.match, string.lower, string.upper, string.gsub, string.sub, string.format, string.byte
-local concat, sort = table.concat, table.sort
-local type, next, setmetatable, rawset, tonumber, tostring = type, next, setmetatable, rawset, tonumber, tostring
-local R, P, S, V, C, Cp, Cs, Ct, Cmt, Cc, Cf, Cg, Carg = lpeg.R, lpeg.P, lpeg.S, lpeg.V, lpeg.C, lpeg.Cp, lpeg.Cs, lpeg.Ct, lpeg.Cmt, lpeg.Cc, lpeg.Cf, lpeg.Cg, lpeg.Carg
-local lpegmatch = lpeg.match
-
-local usage = (textadept and "textadept") or (resolvers and "context") or "scite"
-local nesting = 0
-local output = nil
-
------ print = textadept and ui and ui.print or print -- crashes when ui is not yet defined
-
-local function print(...)
- if not output then
- output = io.open("lexer.log","w")
- end
- output:write(...,"\n")
- output:flush()
-end
-
-local function report(fmt,str,...)
- if log then
- if str then
- fmt = format(fmt,str,...)
- end
- print(format("scite lpeg lexer > %s > %s",nesting == 0 and "-" or nesting,fmt))
- end
-end
-
-local function inform(...)
- if log and trace then
- report(...)
- end
-end
-
-inform("loading context lexer module (global table: %s)",tostring(global))
-
-do
-
- local floor = math and math.floor
- local format = format
- local tonumber = tonumber
-
- if not floor then
-
- if tonumber(string.match(_VERSION,"%d%.%d")) < 5.3 then
- floor = function(n)
- return tonumber(format("%d",n))
- end
- else
- -- 5.3 has a mixed number system and format %d doesn't work with
- -- floats any longer ... no fun
- floor = function(n)
- return (n - n % 1)
- end
- end
-
- math = math or { }
-
- math.floor = floor
-
- end
-
-end
-
-local floor = math.floor
-
-if not package.searchpath then
-
- -- Unfortunately the io library is only available when we end up
- -- in this branch of code.
-
- inform("using adapted function 'package.searchpath' (if used at all)")
-
- function package.searchpath(name,path)
- local tried = { }
- for part in gmatch(path,"[^;]+") do
- local filename = gsub(part,"%?",name)
- local f = io.open(filename,"r")
- if f then
- inform("file found on path: %s",filename)
- f:close()
- return filename
- end
- tried[#tried + 1] = format("no file '%s'",filename)
- end
- -- added: local path .. for testing
- local f = io.open(filename,"r")
- if f then
- inform("file found on current path: %s",filename)
- f:close()
- return filename
- end
- --
- tried[#tried + 1] = format("no file '%s'",filename)
- return nil, concat(tried,"\n")
- end
-
-end
-
-local lexers = { }
-local context = { }
-local helpers = { }
-lexers.context = context
-lexers.helpers = helpers
-
-local patterns = { }
-context.patterns = patterns -- todo: lexers.patterns
-
-context.report = report
-context.inform = inform
-
-lexers.LEXERPATH = package.path -- can be multiple paths separated by ;
-
-if resolvers then
- -- todo: set LEXERPATH
- -- todo: set report
-end
-
-local function sortedkeys(hash) -- simple version, good enough for here
- local t, n = { }, 0
- for k, v in next, hash do
- t[#t+1] = k
- local l = #tostring(k)
- if l > n then
- n = l
- end
- end
- sort(t)
- return t, n
-end
-
-helpers.sortedkeys = sortedkeys
-
-local usedlexers = { }
-local parent_lexer = nil
-
--- The problem with styles is that there is some nasty interaction with scintilla
--- and each version of lexer dll/so has a different issue. So, from now on we will
--- just add them here. There is also a limit on some 30 styles. Maybe I should
--- hash them in order to reuse.
-
--- todo: work with proper hashes and analyze what styles are really used by a
--- lexer
-
-local default = {
- "nothing", "whitespace", "comment", "string", "number", "keyword",
- "identifier", "operator", "error", "preprocessor", "constant", "variable",
- "function", "type", "label", "embedded",
- "quote", "special", "extra", "reserved", "okay", "warning",
- "command", "internal", "preamble", "grouping", "primitive", "plain",
- "user",
- -- not used (yet) .. we cross the 32 boundary so had to patch the initializer, see (1)
- "char", "class", "data", "definition", "invisible", "regex",
- "standout", "tag",
- "text",
-}
-
-local predefined = {
- "default", "linenumber", "bracelight", "bracebad", "controlchar",
- "indentguide", "calltip",
- -- seems new
- "folddisplaytext"
-}
-
--- Bah ... ugly ... nicer would be a proper hash .. we now have properties
--- as well as STYLE_* and some connection between them ... why .. ok, we
--- could delay things but who cares. Anyway, at this moment the properties
--- are still unknown.
-
-local function preparestyles(list)
- local reverse = { }
- for i=1,#list do
- local k = list[i]
- local K = upper(k)
- local s = "style." .. k
- lexers[K] = k -- is this used
- lexers["STYLE_"..K] = "$(" .. k .. ")"
- reverse[k] = true
- end
- return reverse
-end
-
-local defaultstyles = preparestyles(default)
-local predefinedstyles = preparestyles(predefined)
-
--- These helpers are set afterwards so we delay their initialization ... there
--- is no need to alias each time again and this way we can more easily adapt
--- to updates.
-
--- These keep changing (values, functions, tables ...) so we nee to check these
--- with each update. Some of them are set in the loader (the require 'lexer' is
--- in fact not a real one as the lexer code is loaded in the dll). It's also not
--- getting more efficient.
-
--- FOLD_BASE = lexers.FOLD_BASE or SC_FOLDLEVELBASE
--- FOLD_HEADER = lexers.FOLD_HEADER or SC_FOLDLEVELHEADERFLAG
--- FOLD_BLANK = lexers.FOLD_BLANK or SC_FOLDLEVELWHITEFLAG
--- get_style_at = lexers.get_style_at or GetStyleAt
--- get_indent_amount = lexers.get_indent_amount or GetIndentAmount
--- get_property = lexers.get_property or GetProperty
--- get_fold_level = lexers.get_fold_level or GetFoldLevel
-
--- It needs checking: do we have access to all properties now? I'll clean
--- this up anyway as I want a simple clean and stable model.
-
--- This is somewhat messy. The lexer dll provides some virtual fields:
---
--- + property
--- + property_int
--- + style_at
--- + fold_level
--- + indent_amount
---
--- but for some reasons not:
---
--- + property_expanded
---
--- As a consequence we need to define it here because otherwise the
--- lexer will crash. The fuzzy thing is that we don't have to define
--- the property and property_int tables but we do have to define the
--- expanded beforehand. The folding properties are no longer interfaced
--- so the interface to scite is now rather weak (only a few hard coded
--- properties).
-
-local FOLD_BASE = 0
-local FOLD_HEADER = 0
-local FOLD_BLANK = 0
-
-local style_at = { }
-local indent_amount = { }
-local fold_level = { }
-
-local function check_main_properties()
- if not lexers.property then
- lexers.property = { }
- end
- if not lexers.property_int then
- lexers.property_int = setmetatable({ }, {
- __index = function(t,k)
- -- why the tostring .. it relies on lua casting to a number when
- -- doing a comparison
- return tonumber(lexers.property[k]) or 0 -- tostring removed
- end,
- -- __newindex = function(t,k,v)
- -- report("properties are read-only, '%s' is not changed",k)
- -- end,
- })
- end
-end
-
-lexers.property_expanded = setmetatable({ }, {
- __index = function(t,k)
- -- better be safe for future changes .. what if at some point this is
- -- made consistent in the dll ... we need to keep an eye on that
- local property = lexers.property
- if not property then
- check_main_properties()
- end
- --
--- return gsub(property[k],"[$%%]%b()", function(k)
--- return t[sub(k,3,-2)]
--- end)
- local v = property[k]
- if v then
- v = gsub(v,"[$%%]%b()", function(k)
- return t[sub(k,3,-2)]
- end)
- end
- return v
- end,
- __newindex = function(t,k,v)
- report("properties are read-only, '%s' is not changed",k)
- end,
-})
-
--- A downward compatible feature but obsolete:
-
--- local function get_property(tag,default)
--- return lexers.property_int[tag] or lexers.property[tag] or default
--- end
-
--- We still want our own properties (as it keeps changing so better play
--- safe from now on). At some point I can freeze them.
-
-local function check_properties(lexer)
- if lexer.properties then
- return lexer
- end
- check_main_properties()
- -- we use a proxy
- local mainproperties = lexers.property
- local properties = { }
- local expanded = setmetatable({ }, {
- __index = function(t,k)
- return gsub(properties[k] or mainproperties[k],"[$%%]%b()", function(k)
- return t[sub(k,3,-2)]
- end)
- end,
- })
- lexer.properties = setmetatable(properties, {
- __index = mainproperties,
- __call = function(t,k,default) -- expands
- local v = expanded[k]
- local t = type(default)
- if t == "number" then
- return tonumber(v) or default
- elseif t == "boolean" then
- return v == nil and default or v
- else
- return v or default
- end
- end,
- })
- return lexer
-end
-
--- do
--- lexers.property = { foo = 123, red = "R" }
--- local a = check_properties({}) print("a.foo",a.properties.foo)
--- a.properties.foo = "bar" print("a.foo",a.properties.foo)
--- a.properties.foo = "bar:$(red)" print("a.foo",a.properties.foo) print("a.foo",a.properties("foo"))
--- end
-
-local function set(value,default)
- if value == 0 or value == false or value == "0" then
- return false
- elseif value == 1 or value == true or value == "1" then
- return true
- else
- return default
- end
-end
-
-local function check_context_properties()
- local property = lexers.property -- let's hope that this stays
- log = set(property["lexer.context.log"], log)
- trace = set(property["lexer.context.trace"], trace)
- detail = set(property["lexer.context.detail"], detail)
- show = set(property["lexer.context.show"], show)
- collapse = set(property["lexer.context.collapse"],collapse)
- inspect = set(property["lexer.context.inspect"], inspect)
-end
-
-function context.registerproperties(p) -- global
- check_main_properties()
- local property = lexers.property -- let's hope that this stays
- for k, v in next, p do
- property[k] = v
- end
- check_context_properties()
-end
-
-context.properties = setmetatable({ }, {
- __index = lexers.property,
- __newindex = function(t,k,v)
- check_main_properties()
- lexers.property[k] = v
- check_context_properties()
- end,
-})
-
--- We want locals to we set them delayed. Once.
-
-local function initialize()
- FOLD_BASE = lexers.FOLD_BASE
- FOLD_HEADER = lexers.FOLD_HEADER
- FOLD_BLANK = lexers.FOLD_BLANK
- --
- style_at = lexers.style_at -- table
- indent_amount = lexers.indent_amount -- table
- fold_level = lexers.fold_level -- table
- --
- check_main_properties()
- --
- initialize = nil
-end
-
--- Style handler.
---
--- The property table will be set later (after loading) by the library. The
--- styleset is not needed any more as we predefine all styles as defaults
--- anyway (too bug sensitive otherwise).
-
-local function tocolors(colors)
- local colorset = { }
- local property_int = lexers.property_int or { }
- for k, v in next, colors do
- if type(v) == "table" then
- local r, g, b = v[1], v[2], v[3]
- if r and g and b then
- v = tonumber(format("%02X%02X%02X",b,g,r),16) or 0 -- hm
- elseif r then
- v = tonumber(format("%02X%02X%02X",r,r,r),16) or 0
- else
- v = 0
- end
- end
- colorset[k] = v
- property_int["color."..k] = v
- end
- return colorset
-end
-
-local function toproperty(specification)
- local serialized = { }
- for key, value in next, specification do
- if value == true then
- serialized[#serialized+1] = key
- elseif type(value) == "table" then
- local r, g, b = value[1], value[2], value[3]
- if r and g and b then
- value = format("#%02X%02X%02X",r,g,b) or "#000000"
- elseif r then
- value = format("#%02X%02X%02X",r,r,r) or "#000000"
- else
- value = "#000000"
- end
- serialized[#serialized+1] = key .. ":" .. value
- else
- serialized[#serialized+1] = key .. ":" .. tostring(value)
- end
- end
- return concat(serialized,",")
-end
-
-local function tostyles(styles)
- local styleset = { }
- local property = lexers.property or { }
- for k, v in next, styles do
- v = toproperty(v)
- styleset[k] = v
- property["style."..k] = v
- end
- return styleset
-end
-
-context.toproperty = toproperty
-context.tostyles = tostyles
-context.tocolors = tocolors
-
--- If we had one instance/state of Lua as well as all regular libraries
--- preloaded we could use the context base libraries. So, let's go poor-
-- man's solution now.
-
-- Register the style set: convert the specifications, remember both the
-- raw and the serialized form, and optionally report what we got.
function context.registerstyles(styles)
    local styleset = tostyles(styles)
    context.styles = styles
    context.styleset = styleset
    if detail then
        -- sortedkeys also returns the width needed for alignment
        local keys, width = sortedkeys(styleset)
        local template = " %-" .. width .. "s : %s"
        report("initializing styleset:")
        for i=1,#keys do
            local key = keys[i]
            report(template,key,styleset[key])
        end
    elseif trace then
        report("initializing styleset")
    end
end
-
-- Register the color set (needed for textadept): convert, remember and
-- optionally report.
function context.registercolors(colors)
    local colorset = tocolors(colors)
    context.colors = colors
    context.colorset = colorset
    if detail then
        -- sortedkeys also returns the width needed for alignment
        local keys, width = sortedkeys(colorset)
        local template = " %-" .. width .. "s : %i"
        report("initializing colorset:")
        for i=1,#keys do
            local key = keys[i]
            report(template,key,colorset[key])
        end
    elseif trace then
        report("initializing colorset")
    end
end
-
--- Some spell checking related stuff. Unfortunately we cannot use a path set
--- by property. This will get a hook for resolvers.
-
-- Search paths, relative to each lexer root, tried in order when
-- locating lua data files (lexers, definitions, spell lists).
local locations = {
    "context/lexers", -- context lexers
    "context/lexers/data", -- context lexers
    "../lexers", -- original lexers
    "../lexers/data", -- original lexers
    ".", -- whatever
    "./data", -- whatever
}
-
--- local function collect(name)
--- local root = gsub(lexers.LEXERPATH or ".","/.-lua$","") .. "/" -- this is a horrible hack
--- -- report("module '%s' locating '%s'",tostring(lexers),name)
--- for i=1,#locations do
--- local fullname = root .. locations[i] .. "/" .. name .. ".lua" -- so we can also check for .luc
--- if trace then
--- report("attempt to locate '%s'",fullname)
--- end
--- local okay, result = pcall(function () return dofile(fullname) end)
--- if okay then
--- return result, fullname
--- end
--- end
--- end
-
-- Locate and run a lua file. In a context run we can simply require it;
-- otherwise we walk every root in LEXERPATH combined with the search
-- locations and try a (protected) dofile.
local collect

if usage == "context" then

    collect = function(name)
        return require(name), name
    end

else

    collect = function(name)
        local pathlist = lexers.LEXERPATH or "."
        for root in gmatch(pathlist,"[^;]+") do
            -- strip a trailing ".../*lua" segment from the root
            local base = gsub(root,"/[^/]-lua$","")
            for i=1,#locations do
                -- so we can also check for .luc some day
                local fullname = base .. "/" .. locations[i] .. "/" .. name .. ".lua"
                if trace then
                    report("attempt to locate '%s'",fullname)
                end
                local okay, result = pcall(function () return dofile(fullname) end)
                if okay then
                    return result, fullname
                end
            end
        end
        -- return require(name), name
    end

end
-
-- Load a lua file through the collect helper. Returns the data plus the
-- resolved file name, or nothing when the file cannot be found.
function context.loadluafile(name)
    local data, fullname = collect(name)
    if not data then
        if not textadept then
            report("unable to load lua file '%s'",name)
        end
        return
    end
    if trace then
        report("lua file '%s' has been loaded",fullname)
    end
    return data, fullname
end
-
--- in fact we could share more as we probably process the data but then we need
--- to have a more advanced helper
-
-- Cache of loaded definition files; false marks a known miss so we do
-- not hit the file system again for it.
local cache = { }

-- Load (and cache) a definition file. Returns the definition table, or
-- false when the file could not be loaded.
function context.loaddefinitions(name)
    local data = cache[name]
    if data then
        if trace then
            report("reusing definitions '%s'",name)
        end
        return data
    elseif data == false then
        -- A previous attempt already failed: honor the negative cache
        -- entry. (The original reported this case but then fell through
        -- and re-collected on every call, defeating the cache.)
        if trace then
            report("definitions '%s' were not found",name)
        end
        return false
    end
    local fullname
    data, fullname = collect(name)
    if not data then
        if not textadept then
            report("unable to load definition file '%s'",name)
        end
        data = false
    elseif trace then
        report("definition file '%s' has been loaded",fullname)
        if detail then
            local t, n = sortedkeys(data)
            local template = " %-" .. n .. "s : %s"
            for i=1,#t do
                local k = t[i]
                local v = data[k]
                if type(v) ~= "table" then
                    report(template,k,tostring(v))
                elseif #v > 0 then
                    report(template,k,#v)
                else
                    -- no need to show the hash part
                end
            end
        end
    end
    cache[name] = data
    return type(data) == "table" and data
end
-
--- A bit of regression in textadept > 10 so updated ... done a bit different.
--- We don't use this in the context lexers anyway.
-
-- Compatibility replacement for the textadept word_match helper (a bit
-- of regression in textadept > 10; not used by the context lexers).
-- The words can be a list or a string with "--" comments; the returned
-- lpeg pattern matches any of them.
function context.word_match(words,word_chars,case_insensitive)
    if type(words) == "string" then
        -- strip "--" comments and split on whitespace
        local stripped = gsub(words,"%-%-[^\n]+","")
        local collected = { }
        for word in gmatch(stripped,"%S+") do
            collected[#collected+1] = word
        end
        words = collected
    end
    local hash = { }
    for i=1,#words do
        hash[words[i]] = true
    end
    if case_insensitive then
        for i=1,#words do
            hash[lower(words[i])] = true
        end
    end
    -- the character set covers the given extra chars plus every
    -- character occurring in one of the words
    local chars = S(word_chars or "")
    for i=1,#words do
        chars = chars + S(words[i])
    end
    local check
    if case_insensitive then
        check = function(input,index,word)
            -- we can speed up mixed case if needed
            return (hash[word] or hash[lower(word)]) and index or nil
        end
    else
        check = function(input,index,word)
            return hash[word] and index or nil
        end
    end
    return Cmt(chars^1,check)
end
-
--- Patterns are grouped in a separate namespace but the regular lexers expect
--- shortcuts to be present in the lexers library. Maybe I'll incorporate some
--- of l-lpeg later.
-
do

    -- Building blocks. The ascii, extend and control patterns used to
    -- be referenced in the lexers.* shortcuts below without ever being
    -- declared as locals (only as patterns.* entries), so those
    -- shortcuts silently ended up nil; they are proper locals now.

    local anything    = P(1)
    local idtoken     = R("az","AZ","\127\255","__")
    local digit       = R("09")
    local sign        = S("+-")
    local period      = P(".")
    local octdigit    = R("07")
    local hexdigit    = R("09","AF","af")
    local lower       = R("az")
    local upper       = R("AZ")
    local alpha       = upper + lower
    local space       = S(" \n\r\t\f\v")
    local eol         = S("\r\n")
    local backslash   = P("\\")
    local ascii       = R("\000\127") -- useless
    local extend      = R("\000\255") -- useless
    local control     = R("\000\031")
    local decimal     = digit^1
    local octal       = P("0")
                      * octdigit^1
    local hexadecimal = P("0") * S("xX")
                      * (hexdigit^0 * period * hexdigit^1 + hexdigit^1 * period * hexdigit^0 + hexdigit^1)
                      * (S("pP") * sign^-1 * hexdigit^1)^-1 -- *
    local integer     = sign^-1
                      * (hexadecimal + octal + decimal)
    local float       = sign^-1
                      * (digit^0 * period * digit^1 + digit^1 * period * digit^0 + digit^1)
                      * S("eE") * sign^-1 * digit^1 -- * (exponent mandatory here; 'real' below catches the rest)

    patterns.idtoken     = idtoken
    patterns.digit       = digit
    patterns.sign        = sign
    patterns.period      = period
    patterns.octdigit    = octdigit
    patterns.hexdigit    = hexdigit
    patterns.ascii       = ascii  -- useless
    patterns.extend      = extend -- useless
    patterns.control     = control
    patterns.lower       = lower
    patterns.upper       = upper
    patterns.alpha       = alpha
    patterns.decimal     = decimal
    patterns.octal       = octal
    patterns.hexadecimal = hexadecimal
    patterns.float       = float
    patterns.cardinal    = decimal

    patterns.signeddecimal     = sign^-1 * decimal
    patterns.signedoctal       = sign^-1 * octal
    patterns.signedhexadecimal = sign^-1 * hexadecimal
    patterns.integer           = integer
    patterns.real              =
        sign^-1 * (                    -- at most one
            digit^1 * period * digit^0 -- 10.0 10.
          + digit^0 * period * digit^1 -- 0.10 .10
          + digit^1                    -- 10
        )

    patterns.anything   = anything
    patterns.any        = anything
    patterns.restofline = (1-eol)^1
    patterns.space      = space
    patterns.spacing    = space^1
    patterns.nospacing  = (1-space)^1
    patterns.eol        = eol
    patterns.newline    = P("\r\n") + eol
    patterns.backslash  = backslash

    local endof = S("\n\r\f")

    -- Succeeds (without consuming) at the start of the input or right
    -- after a line ending.
    patterns.startofline = P(function(input,index)
        return (index == 1 or lpegmatch(endof,input,index-1)) and index
    end)

    -- These are the expected ones for other lexers. Maybe all in own namespace
    -- and provide compatibility layer. or should I just remove them?

    lexers.any            = anything
    lexers.ascii          = ascii   -- was nil before (undefined local)
    lexers.extend         = extend  -- was nil before (undefined local)
    lexers.alpha          = alpha
    lexers.digit          = digit
    lexers.alnum          = alpha + digit
    lexers.lower          = lower
    lexers.upper          = upper
    lexers.xdigit         = hexdigit
    lexers.cntrl          = control -- was nil before (undefined local)
    lexers.graph          = R("!~")
    lexers.print          = R(" ~")
    lexers.punct          = R("!/", ":@", "[\'", "{~") -- NOTE(review): "[\'" looks reversed, presumably meant "[`" -- confirm
    lexers.space          = space
    lexers.newline        = S("\r\n\f")^1
    lexers.nonnewline     = 1 - lexers.newline
    lexers.nonnewline_esc = 1 - (lexers.newline + '\\') + backslash * anything
    lexers.dec_num        = decimal
    lexers.oct_num        = octal
    lexers.hex_num        = hexadecimal
    lexers.integer        = integer
    lexers.float          = float
    lexers.word           = (alpha + "_") * (alpha + digit + "_")^0 -- weird, why digits

end
-
--- end of patterns
-
-- Match any of the given words exactly. Optional word_chars extend the
-- characters a candidate may contain (or can be a pattern themselves);
-- with case_insensitive set the final lookup is done lowercased. A hash
-- instead of a list can be passed too (values are then returned).
function context.exact_match(words,word_chars,case_insensitive)
    local characters = concat(words) -- the concat catches _ etc
    local pattern
    if word_chars == true or word_chars == false or word_chars == nil then
        word_chars = ""
    end
    if type(word_chars) == "string" then
        pattern = S(characters) + patterns.idtoken
        if case_insensitive then
            pattern = pattern + S(upper(characters)) + S(lower(characters))
        end
        if word_chars ~= "" then
            pattern = pattern + S(word_chars)
        end
    elseif word_chars then
        -- an lpeg pattern was passed in directly
        pattern = word_chars
    end
    local list = { }
    if case_insensitive then
        if #words == 0 then
            -- a hash: keep the values, lowercase the keys
            for k, v in next, words do
                list[lower(k)] = v
            end
        else
            for i=1,#words do
                list[lower(words[i])] = true
            end
        end
        return Cmt(pattern^1, function(_,i,s)
            return list[lower(s)] -- and i or nil
        end)
    else
        if #words == 0 then
            for k, v in next, words do
                list[k] = v
            end
        else
            for i=1,#words do
                list[words[i]] = true
            end
        end
        return Cmt(pattern^1, function(_,i,s)
            return list[s] -- and i or nil
        end)
    end
end
-
-- Return a pattern that matches any of the given literal words (plain
-- ordered alternation, no word boundary checking).
function context.just_match(words)
    local pattern = P(words[1])
    for i=2,#words do
        pattern = pattern + P(words[i])
    end
    return pattern
end
-
--- spell checking (we can only load lua files)
---
--- return {
--- min = 3,
--- max = 40,
--- n = 12345,
--- words = {
--- ["someword"] = "someword",
--- ["anotherword"] = "Anotherword",
--- },
--- }
-
-- Spell checking state: loaded word lists per language tag plus a
-- global disable flag.
local lists = { }
local disabled = false -- NOTE(review): set below but not consulted in this chunk; presumably checked elsewhere -- confirm

function context.disablewordcheck()
    disabled = true
end
-
-- Enable spell checking for a language tag. Returns the hash of valid
-- words (lowercase keys, original-case values) plus the minimum word
-- length; false and 3 when there is no (valid) list. Lists are loaded
-- once from "spell-<tag>" files and cached.
function context.setwordlist(tag,limit)
    if not tag or tag == "" then
        return false, 3
    end
    local found = lists[tag]
    if not found then
        found = context.loaddefinitions("spell-" .. tag)
        if type(found) ~= "table" then
            if not textadept then
                report("invalid spell checking list for '%s'",tag)
            end
            found = { words = false, min = 3 }
        else
            found.words = found.words or false
            found.min = found.min or 3
        end
        lists[tag] = found
    end
    if trace then
        report("enabling spell checking for '%s' with minimum '%s'",tag,found.min)
    end
    return found.words, found.min
end
-
-- For spell checking a word is at least three word tokens (ascii
-- letters or bytes >= 127).
patterns.wordtoken = R("az","AZ","\127\255")
patterns.wordpattern = patterns.wordtoken^3 -- todo: if limit and #s < limit then
-
-- Classify word s against the validwords hash (lowercase keys mapping
-- to the original casing). Returns true plus a style name ("text",
-- "okay", "warning" or "error") plus the position i.
function context.checkedword(validwords,validminimum,s,i) -- ,limit
    if not validwords then -- or #s < validminimum then
        return true, "text", i -- true, "default", i
    end
    -- keys are lowercase
    local stored = validwords[s]
    if stored == s then
        return true, "okay", i -- exact match
    elseif stored then
        return true, "warning", i -- case issue
    end
    stored = validwords[lower(s)]
    if stored == s then
        return true, "okay", i -- exact match
    elseif stored then
        return true, "warning", i -- case issue
    elseif upper(s) == s then
        return true, "warning", i -- probably a logo or acronym
    else
        return true, "error", i
    end
end
-
-- Like checkedword but only returns the style name; words shorter than
-- validminimum are always plain "text".
function context.styleofword(validwords,validminimum,s) -- ,limit
    if not validwords or #s < validminimum then
        return "text"
    end
    -- keys are lowercase
    local stored = validwords[s]
    if stored == s then
        return "okay" -- exact match
    elseif stored then
        return "warning" -- case issue
    end
    stored = validwords[lower(s)]
    if stored == s then
        return "okay" -- exact match
    elseif stored then
        return "warning" -- case issue
    elseif upper(s) == s then
        return "warning" -- probably a logo or acronym
    else
        return "error"
    end
end
-
--- overloaded functions
-
-- Memoizing tables that map a fold level to its { level, flag } entry.
local h_table, b_table, n_table = { }, { }, { } -- from the time small tables were used (optimization)

setmetatable(h_table, { __index = function(t,level) local v = { level, FOLD_HEADER } t[level] = v return v end })
setmetatable(b_table, { __index = function(t,level) local v = { level, FOLD_BLANK } t[level] = v return v end })
setmetatable(n_table, { __index = function(t,level) local v = { level } t[level] = v return v end })

-- Line helpers: p_yes captures position and content of a non empty
-- line, p_nop just consumes a newline.
local newline = patterns.newline
local p_yes = Cp() * Cs((1-newline)^1) * newline^-1
local p_nop = newline

-- Cached folder closures, one per lexer.
local folders = { }
-
--- Snippets from the > 10 code .. but we do things different so ...
-
-- Fold by parsing (used when a lexer provides fold symbols). A folder
-- closure is built once per lexer and cached in 'folders'; it returns a
-- table mapping line numbers to fold levels, optionally flagged with
-- FOLD_HEADER (level goes up) or FOLD_BLANK (empty line).
local function fold_by_parsing(text,start_pos,start_line,start_level,lexer)
    local folder = folders[lexer]
    if not folder then
        --
        -- shared upvalues of the folder closure built below
        local pattern, folds, text, start_pos, line_num, prev_level, current_level
        --
        local fold_symbols = lexer._foldsymbols
        local fold_pattern = lexer._foldpattern -- use lpeg instead (context extension)
        --
        -- textadept >= 10
        --
        -- local zerosumlines = lexer.property_int["fold.on.zero.sum.lines"] > 0 -- not done
        -- local compact = lexer.property_int['fold.compact'] > 0 -- not done
        -- local lowercase = lexer._CASEINSENSITIVEFOLDPOINTS -- useless (utf will distort)
        --
        if fold_pattern then
            -- The lpeg based variant (context extension): every match of
            -- a fold symbol adjusts current_level.
            -- if no functions are found then we could have a faster one
            fold_pattern = Cp() * C(fold_pattern) / function(s,match)
                local symbols = fold_symbols[style_at[start_pos + s]]
                if symbols then
                    local l = symbols[match]
                    if l then
                        current_level = current_level + l
                    end
                end
            end
            local action_y = function()
                -- a line with content: mark it a header when the level rose
                folds[line_num] = prev_level
                if current_level > prev_level then
                    folds[line_num] = prev_level + FOLD_HEADER
                end
                if current_level < FOLD_BASE then
                    current_level = FOLD_BASE
                end
                prev_level = current_level
                line_num = line_num + 1
            end
            local action_n = function()
                -- an empty line keeps the previous level, flagged blank
                folds[line_num] = prev_level + FOLD_BLANK
                line_num = line_num + 1
            end
            pattern = ((fold_pattern + (1-newline))^1 * newline / action_y + newline/action_n)^0

        else
            -- the traditional one but a bit optimized
            local fold_symbols_patterns = fold_symbols._patterns
            local action_y = function(pos,line)
                for j=1, #fold_symbols_patterns do
                    for s, match in gmatch(line,fold_symbols_patterns[j]) do -- "()(" .. patterns[i] .. ")"
                        local symbols = fold_symbols[style_at[start_pos + pos + s - 1]]
                        local l = symbols and symbols[match]
                        local t = type(l)
                        if t == "number" then
                            current_level = current_level + l
                        elseif t == "function" then
                            -- a callback can compute the level delta itself
                            current_level = current_level + l(text, pos, line, s, match)
                        end
                    end
                end
                folds[line_num] = prev_level
                if current_level > prev_level then
                    folds[line_num] = prev_level + FOLD_HEADER
                end
                if current_level < FOLD_BASE then
                    current_level = FOLD_BASE
                end
                prev_level = current_level
                line_num = line_num + 1
            end
            local action_n = function()
                folds[line_num] = prev_level + FOLD_BLANK
                line_num = line_num + 1
            end
            pattern = (p_yes/action_y + p_nop/action_n)^0
        end
        --
        local reset_parser = lexer._reset_parser
        --
        -- the cached closure: (re)initialize the shared state, run the
        -- pattern, hand over the result table
        folder = function(_text_,_start_pos_,_start_line_,_start_level_)
            if reset_parser then
                reset_parser()
            end
            folds = { }
            text = _text_
            start_pos = _start_pos_
            line_num = _start_line_
            prev_level = _start_level_
            current_level = prev_level
            lpegmatch(pattern,text)
            -- make folds collectable
            local t = folds
            folds = nil
            return t
        end
        folders[lexer] = folder
    end
    return folder(text,start_pos,start_line,start_level,lexer)
end
-
-- State shared between the indentation fold actions below and
-- fold_by_indentation (which initializes it per run).
local folds, current_line, prev_level

-- A non blank line: the fold level follows from its indentation; when
-- the level rises, the nearest preceding non blank line becomes a header.
local function action_y()
    local current_level = FOLD_BASE + indent_amount[current_line]
    if current_level > prev_level then -- next level
        local i = current_line - 1
        local f
        while true do
            f = folds[i]
            if not f then
                break
            elseif f[2] == FOLD_BLANK then
                i = i - 1 -- skip back over blank lines
            else
                f[2] = FOLD_HEADER -- low indent
                break
            end
        end
        folds[current_line] = { current_level } -- high indent
    elseif current_level < prev_level then -- prev level
        local f = folds[current_line - 1]
        if f then
            f[1] = prev_level -- high indent
        end
        folds[current_line] = { current_level } -- low indent
    else -- same level
        folds[current_line] = { prev_level }
    end
    prev_level = current_level
    current_line = current_line + 1
end

-- A blank line: keep the previous level, flagged blank.
local function action_n()
    folds[current_line] = { prev_level, FOLD_BLANK }
    current_line = current_line + 1
end

-- Defined at module level (not inside fold_by_indentation) because the
-- pattern binds the local functions above once.
local pattern = ( S("\t ")^0 * ( (1-patterns.eol)^1 / action_y + P(true) / action_n) * newline )^0
-
-- Fold by indentation: drive the module level pattern (which binds
-- action_y/action_n) over the text and flatten the result.
local function fold_by_indentation(text,start_pos,start_line,start_level)
    -- set up the state consumed by the actions
    folds = { }
    current_line = start_line
    prev_level = start_level
    -- the pattern is defined at module level; defining it here would
    -- not work as it binds the (then stale) local functions
    lpegmatch(pattern,text)
    -- flatten the { level, flag } pairs into plain numbers
    for line, entry in next, folds do
        folds[line] = entry[1] + (entry[2] or 0)
    end
    -- hand over the table and make the module reference collectable
    local result = folds
    folds = nil
    return result
end
-
-- Fold by line: every line simply gets the start level (memoized
-- { level } tables from n_table; still tables, needs checking).
local function fold_by_line(text,start_pos,start_line,start_level)
    local result = { }
    local line = start_line
    -- can also be lpeg'd
    for _ in gmatch(text,".-\r?\n") do
        result[line] = n_table[start_level]
        line = line + 1
    end
    return result
end
-
-- Size thresholds per folding strategy; all equal for now because we
-- never know the real file size here.
local threshold_by_lexer = 512 * 1024 -- we don't know the filesize yet
local threshold_by_parsing = 512 * 1024 -- we don't know the filesize yet
local threshold_by_indentation = 512 * 1024 -- we don't know the filesize yet
local threshold_by_line = 512 * 1024 -- we don't know the filesize yet
-
-- Dispatch folding to the right strategy: a lexer provided folder, fold
-- symbols (parsing), indentation or per line. (Hm, we had size
-- thresholds .. where did they go; filesize is unknown here anyway.)
function context.fold(lexer,text,start_pos,start_line,start_level)
    if text == "" then
        return { }
    end
    if initialize then
        initialize()
    end
    local filesize = 0 -- we don't know that
    local by_lexer = lexer._fold
    local by_symbols = lexer._foldsymbols
    if by_lexer then
        if filesize <= threshold_by_lexer then
            return by_lexer(text,start_pos,start_line,start_level,lexer)
        end
    elseif by_symbols then -- and lexer.properties("fold.by.parsing",1) > 0 then
        if filesize <= threshold_by_parsing then
            return fold_by_parsing(text,start_pos,start_line,start_level,lexer)
        end
    elseif lexer._FOLDBYINDENTATION or lexer.properties("fold.by.indentation",1) > 0 then
        if filesize <= threshold_by_indentation then
            return fold_by_indentation(text,start_pos,start_line,start_level)
        end
    elseif lexer._FOLDBYLINE or lexer.properties("fold.by.line",1) > 0 then
        if filesize <= threshold_by_line then
            return fold_by_line(text,start_pos,start_line,start_level)
        end
    end
    return { }
end
-
--- The following code is mostly unchanged:
-
-- Register a rule under the given id, remembering registration order
-- (unchanged from the original behavior).
local function add_rule(lexer,id,rule)
    local rules = lexer._RULES
    if not rules then
        rules = { }
        lexer._RULES = rules
        lexer._RULEORDER = { }
    end
    rules[id] = rule
    local order = lexer._RULEORDER
    order[#order + 1] = id
end
-
-- Replace an existing rule; on a child lexer the parent's rule table is
-- the one that counts (needed for textadept > 10).
local function modify_rule(lexer,id,rule)
    local target = lexer._lexer or lexer
    target._RULES[id] = rule
end
-
-- Fetch a rule; on a child lexer the parent's rule table is consulted
-- (needed for textadept > 10).
local function get_rule(lexer,id)
    local target = lexer._lexer or lexer
    return target._RULES[id]
end
-
--- I finally figured out that adding more styles was an issue because of several
--- reasons:
---
--- + in old versions there was a limit in the amount, so we overran the built-in
--- hard coded scintilla range
--- + then, the add_style function didn't check for already known ones, so again
--- we had an overrun (with some magic that could be avoided)
--- + then, when I messed with a new default set I realized that there is no check
--- in initializing _TOKENSTYLES (here the inspect function helps)
--- + of course it was mostly a side effect of passing all the used styles to the
--- _tokenstyles instead of only the not-default ones but such a thing should not
--- matter (read: intercepted)
---
--- This finally removed a head-ache and was revealed by lots of tracing, which I
--- should have built in way earlier.
-
-- Register an extra style (changed a bit around 3.41). Known default or
-- predefined styles are skipped -- except in textadept, where styles
-- are stored per buffer -- because adding them again could overflow the
-- amount possible in old versions of scintilla.
local function add_style(lexer,token_name,style)
    if defaultstyles[token_name] then
        if trace and detail then
            report("default style '%s' is ignored as extra style",token_name)
        end
        if not textadept then
            return
        end
        -- textadept: go on, stored per buffer
    elseif predefinedstyles[token_name] then
        if trace and detail then
            report("predefined style '%s' is ignored as extra style",token_name)
        end
        if not textadept then
            return
        end
        -- textadept: go on, stored per buffer
    elseif trace and detail then
        report("adding extra style '%s' as '%s'",token_name,style)
    end
    -- This is unchanged. We skip the dangerous zone.
    local num_styles = lexer._numstyles
    if num_styles == 32 then
        num_styles = num_styles + 8
    end
    if num_styles >= 255 then
        report("there can't be more than %s styles",255)
    end
    lexer._TOKENSTYLES[token_name] = num_styles
    lexer._EXTRASTYLES[token_name] = style
    lexer._numstyles = num_styles + 1
    -- hm, the original (now) also copies to the parent ._lexer
end
-
-- Initialize the token style mapping of a lexer. We also check for the
-- dangerous zone here so we can use a larger default set; the original
-- code just assumes #default stays below that zone.
local function check_styles(lexer)
    local numstyles = 0
    local tokenstyles = { }
    for i=1,#default do
        if numstyles == 32 then
            numstyles = numstyles + 8 -- jump over the hard coded zone
        end
        tokenstyles[default[i]] = numstyles
        numstyles = numstyles + 1
    end
    -- Unchanged: predefined styles occupy fixed slots.
    for i=1,#predefined do
        tokenstyles[predefined[i]] = i + 31
    end
    lexer._TOKENSTYLES = tokenstyles
    lexer._numstyles = numstyles
    lexer._EXTRASTYLES = { }
    return lexer
end
-
--- At some point an 'any' append showed up in the original code ...
-- but I see no need to catch that case ... better fix the specification.
---
--- hm, why are many joined twice
-
-- Join the registered rules (in registration order) into one token
-- pattern; a native lexer additionally gets a catch-all default token.
-- Slightly different from the original (no 'any' append).
local function join_tokens(lexer)
    local rules = lexer._RULES
    local order = lexer._RULEORDER
    -- report("lexer: %s, tokens: %s",lexer._NAME,table.concat(order," + "))
    if not (rules and order) then
        return P(1)
    end
    local token_rule = rules[order[1]] -- normally whitespace
    for i=2,#order do
        token_rule = token_rule + rules[order[i]]
    end
    if lexer._TYPE ~= "context" then
        -- NOTE(review): this looks up 'any' in the rule table (the
        -- original shadowed the module 'patterns' with _RULES here);
        -- presumably the module level patterns.any was meant -- confirm
        token_rule = token_rule + lexers.token(lexers.DEFAULT, rules.any)
    end
    lexer._TOKENRULE = token_rule
    return token_rule
end
-
-- hm, maybe instead of a grammar just a flat one
-
-- Merge a (parent) lexer and its children into the grammar table;
-- mostly the same as the original. Grandchildren are added first, then
-- every child contributes an embedded rule that runs its tokens until
-- its end rule shows up.
local function add_lexer(grammar, lexer)
    local token_rule = join_tokens(lexer)
    local lexer_name = lexer._NAME
    local children = lexer._CHILDREN
    for i=1,#children do
        local child = children[i]
        if child._CHILDREN then
            add_lexer(grammar, child) -- recurse first
        end
        local child_name = child._NAME
        local rules = child._EMBEDDEDRULES[lexer_name]
        local rules_token_rule = grammar["__" .. child_name] or rules.token_rule
        local pattern = (-rules.end_rule * rules_token_rule)^0 * rules.end_rule^-1
        grammar[child_name] = pattern * V(lexer_name)
        local embedded_child = "_" .. child_name
        grammar[embedded_child] = rules.start_rule * pattern
        token_rule = V(embedded_child) + token_rule
    end
    if trace then
        report("adding lexer '%s' with %s children",lexer_name,#children)
    end
    grammar["__" .. lexer_name] = token_rule
    grammar[lexer_name] = token_rule^0
end
-
-- Build (and store in _GRAMMAR) the lpeg grammar for a lexer. With
-- children we get a real grammar starting at initial_rule, otherwise
-- just the joined token rule; an optional preamble is prefixed. Same as
-- the original apart from the repaired trace message.
local function build_grammar(lexer,initial_rule)
    local children = lexer._CHILDREN
    local lexer_name = lexer._NAME
    local preamble = lexer._preamble
    local grammar = lexer._grammar
    -- if grammar then
    --     -- experiment
    -- elseif children then
    if children then
        if not initial_rule then
            initial_rule = lexer_name
        end
        grammar = { initial_rule }
        add_lexer(grammar, lexer)
        lexer._INITIALRULE = initial_rule
        grammar = Ct(P(grammar))
        if trace then
            -- fixed: the format string said "'%s'and" (missing space)
            report("building grammar for '%s' with whitespace '%s' and %s children",lexer_name,lexer.whitespace or "?",#children)
        end
    else
        grammar = Ct(join_tokens(lexer)^0)
        if trace then
            report("building grammar for '%s' with whitespace '%s'",lexer_name,lexer.whitespace or "?")
        end
    end
    if preamble then
        grammar = preamble^-1 * grammar
    end
    lexer._GRAMMAR = grammar
end
-
--- So far. We need these local functions in the next one.
-
-- Cached per-lexer line parsers for line based lexing (context.lex).
local lineparsers = { }

-- Maximum number of token entries shown when tracing lexer output.
local maxmatched = 100
-
-- Collapse adjacent entries with the same token into one range. The
-- input is a flat { token, position, token, position, ... } list which
-- is compacted in place and returned.
local function collapsed(t)
    local lasttoken = nil
    local lastindex = nil
    for i=1,#t,2 do
        local token = t[i]
        local position = t[i+1]
        if token == lasttoken then
            -- same token as before: just extend the previous range
            t[lastindex] = position
        elseif lastindex then
            lastindex = lastindex + 1
            t[lastindex] = token
            lastindex = lastindex + 1
            t[lastindex] = position
            lasttoken = token
        else
            -- first pair stays where it is
            lastindex = i+1
            lasttoken = token
        end
    end
    if lastindex then
        -- release the now unused tail entries (fixed: the original did
        -- nil arithmetic here when t was empty)
        for i=#t,lastindex+1,-1 do
            t[i] = nil
        end
    end
    return t
end
-
-- Run the grammar over the text and return the flat token/position
-- table, optionally tracing the first maxmatched entries and/or
-- collapsing adjacent equal tokens.
local function matched(lexer,grammar,text)
    -- text = string.gsub(text,"\z","!")
    local t = lpegmatch(grammar,text)
    if trace then
        if show then
            report("output of lexer: %s (max %s entries)",lexer._NAME,maxmatched)
            local styles = lexer._TOKENSTYLES
            local from = 1
            for i=1,2*maxmatched,2 do
                local n = i + 1
                local ti = t[i]
                local tn = t[n]
                if not ti then
                    break
                end
                local txt = sub(text,from,tn-1)
                if txt then
                    txt = gsub(txt,"[%s]"," ") -- flatten whitespace for display
                else
                    txt = "!no text!"
                end
                report("%4i : %s > %s (%s) (%s)",floor(n/2),ti,tn,styles[ti] or "!unset!",txt)
                from = tn
            end
        end
        report("lexer results: %s, length: %s, ranges: %s",lexer._NAME,#text,floor(#t/2))
        if collapse then
            t = collapsed(t)
            report("lexer collapsed: %s, length: %s, ranges: %s",lexer._NAME,#text,floor(#t/2))
        end
    elseif collapse then
        t = collapsed(t)
    end
    return t
end
-
--- Todo: make nice generic lexer (extra argument with start/stop commands) for
--- context itself.
---
--- In textadept >= 10 grammar building seem to have changed a bit. So, in retrospect
--- I could better have just dropped compatibility and stick to ctx lexers only.
-
-- Main lexing entry point: turn text into a flat { token, position }
-- table using the lexer's prebuilt grammar. Handles three cases: line
-- based lexing, parent lexers with children (grammar selected per
-- initial style) and plain lexers.
function context.lex(lexer,text,init_style)
    -- local lexer = global._LEXER
    local grammar = lexer._GRAMMAR
    if initialize then
        initialize()
    end
    if not grammar then
        return { }
    elseif lexer._LEXBYLINE then -- we could keep token
        -- Run the grammar per line and shift the resulting positions by
        -- the accumulated line offset.
        local tokens = { }
        local offset = 0
        local noftokens = 0
        local lineparser = lineparsers[lexer]
        if not lineparser then -- probably a cmt is more efficient
            lineparser = C((1-newline)^0 * newline) / function(line)
                local length = #line
                local line_tokens = length > 0 and lpegmatch(grammar,line)
                if line_tokens then
                    for i=1,#line_tokens,2 do
                        noftokens = noftokens + 1
                        tokens[noftokens] = line_tokens[i]
                        noftokens = noftokens + 1
                        tokens[noftokens] = line_tokens[i + 1] + offset
                    end
                end
                offset = offset + length
                -- pad with a default token up to the end of the line
                if noftokens > 0 and tokens[noftokens] ~= offset then
                    noftokens = noftokens + 1
                    tokens[noftokens] = "default"
                    noftokens = noftokens + 1
                    tokens[noftokens] = offset + 1
                end
            end
            lineparser = lineparser^0
            lineparsers[lexer] = lineparser
        end
        lpegmatch(lineparser,text)
        return tokens
    elseif lexer._CHILDREN then
        -- Pick (and cache per init_style) the grammar that belongs to
        -- the style we start in, so lexing can resume inside a child.
        local hash = lexer._HASH -- hm, was _hash
        if not hash then
            hash = { }
            lexer._HASH = hash
        end
        grammar = hash[init_style]
        if grammar then
            lexer._GRAMMAR = grammar
            -- lexer._GRAMMAR = lexer._GRAMMAR or grammar
        else
            for style, style_num in next, lexer._TOKENSTYLES do
                if style_num == init_style then
                    -- the name of the lexers is filtered from the whitespace
                    -- specification .. weird code, should be a reverse hash
                    local lexer_name = match(style,"^(.+)_whitespace") or lexer._NAME
                    if lexer._INITIALRULE ~= lexer_name then
                        grammar = hash[lexer_name]
                        if not grammar then
                            build_grammar(lexer,lexer_name)
                            grammar = lexer._GRAMMAR
                            hash[lexer_name] = grammar
                        end
                    end
                    break
                end
            end
            grammar = grammar or lexer._GRAMMAR
            hash[init_style] = grammar
        end
        if trace then
            report("lexing '%s' with initial style '%s' and %s children", lexer._NAME,init_style,#lexer._CHILDREN or 0)
        end
        return matched(lexer,grammar,text)
    else
        if trace then
            report("lexing '%s' with initial style '%s'",lexer._NAME,init_style)
        end
        return matched(lexer,grammar,text)
    end
end
-
--- hm, changed in 3.24 .. no longer small table but one table (so we could remove our
-- aggressive optimization which worked quite well)
-
-- Build a token: match patt, then capture the token name and the
-- position right after the match.
function context.token(name, patt)
    return patt * Cc(name) * Cp()
end
-
--- The next ones were mostly unchanged (till now), we moved it here when 3.41
--- became close to impossible to combine with cq. overload and a merge was
--- the only solution. It makes later updates more painful but the update to
--- 3.41 was already a bit of a nightmare anyway.
-
--- Loading lexers is rather interwoven with what the dll/so sets and
--- it changes over time. So, we need to keep an eye on changes. One
--- problem that we always faced were the limitations in length of
--- lexer names (as they get app/prepended occasionally to strings with
--- a hard coded limit). So, we always used alternative names and now need
--- to make sure this doesn't clash. As I no longer intend to use shipped
--- lexers I could strip away some of the code in the future, but keeping
--- it as reference makes sense.
-
-- I spent quite some time figuring out why 3.41 didn't work or crashed, which
-- is hard when no stdout is available and when the io library is absent. In
-- the end one of the problems was in the _NAME setting. We set _NAME
--- to e.g. 'tex' but load from a file with a longer name, which we do
--- as we don't want to clash with existing files, we end up in
--- lexers not being found.
-
-- Stack of whitespace style names. Traditional lexers pick up
-- lexers.WHITESPACE at load time, so we set "<name>_whitespace" around
-- loading and restore the previous value afterwards.
local whitespaces = { }

local function push_whitespace(name)
    whitespaces[#whitespaces+1] = lexers.WHITESPACE or "whitespace"
    lexers.WHITESPACE = name .. "_whitespace"
end

local function pop_whitespace()
    local n = #whitespaces
    local previous = whitespaces[n]
    whitespaces[n] = nil
    lexers.WHITESPACE = previous or "whitespace"
end
-
-- Make sure a lexer has its private "<name>_whitespace" style name set.
local function check_whitespace(lexer,name)
    if not lexer then
        return
    end
    lexer.whitespace = (name or lexer.name or lexer._NAME) .. "_whitespace"
end
-
-- Create a fresh context lexer table tagged with the given name, with
-- whitespace, styles and properties initialized and the shared context
-- styleset attached.
function context.new(name,filename)
    local lexer = {
        _TYPE = "context",
        --
        _NAME = name, -- used for token building
        _FILENAME = filename, -- for diagnostic purposes
        --
        name = name,
        filename = filename,
    }
    if trace then
        report("initializing lexer tagged '%s' from file '%s'",name,filename or name)
    end
    check_whitespace(lexer)
    check_styles(lexer)
    check_properties(lexer)
    lexer._tokenstyles = context.styleset
    return lexer
end
-
-- Produce a minimal placeholder lexer, used when a real lexer file
-- could not be loaded.
local function nolexer(name)
    local lexer = {
        _TYPE = "unset",
        _NAME = name,
        -- _rules = { },
    }
    check_styles(lexer)
    check_whitespace(lexer)
    check_properties(lexer)
    return lexer
end
-
-- Load a lexer file (traditional or context) and normalize the result;
-- falls back on a dummy lexer when loading fails. The whitespace name
-- is stacked around loading because traditional lexers pick it up at
-- load time (no alt_name yet).
local function load_lexer(name,namespace)
    if trace then
        report("loading lexer file '%s'",name)
    end
    push_whitespace(namespace or name)
    local lexer, fullname = context.loadluafile(name)
    pop_whitespace()
    if not lexer then
        report("invalid lexer file '%s'",name)
    elseif trace then
        report("lexer file '%s' has been loaded",fullname)
    end
    if type(lexer) ~= "table" then
        if trace then
            report("lexer file '%s' gets a dummy lexer",name)
        end
        return nolexer(name)
    end
    if lexer._TYPE ~= "context" then
        -- a traditional lexer: make sure our extra fields are set
        lexer._TYPE = "native"
        check_styles(lexer)
        check_whitespace(lexer,namespace or name)
        check_properties(lexer)
    end
    if name ~= namespace then
        lexer._NAME = namespace
    elseif not lexer._NAME then
        lexer._NAME = name -- so: the filename
    end
    return lexer
end
-
--- tracing ...
-
-- Dump everything we know about a lexer, recursing into its children;
-- level is the nesting depth shown in the report. (If we had the
-- regular libs available we could use the usual helpers.)
local function inspect_lexer(lexer,level)
    local parent = lexer._lexer
    lexer._lexer = nil -- prevent endless recursion
    local name = lexer._NAME
    local function bynumber(tag,styles)
        -- sort by style number (and show number -> name)
        local numbers = { }
        for style, number in next, styles do
            numbers[number] = style
        end
        local keys = sortedkeys(numbers)
        for i=1,#keys do
            local k = keys[i]
            report("[%s %s] %s %s = %s",level,name,tag,k,numbers[k])
        end
    end
    local function byname(tag,styles)
        local keys = sortedkeys(styles)
        for i=1,#keys do
            local k = keys[i]
            report("[%s %s] %s %s = %s",level,name,tag,k,styles[k])
        end
    end
    local keys = sortedkeys(lexer)
    for i=1,#keys do
        local k = keys[i]
        report("[%s %s] root key : %s = %s",level,name,k,tostring(lexer[k]))
    end
    bynumber("token style",lexer._TOKENSTYLES)
    byname("extra style",lexer._EXTRASTYLES)
    local children = lexer._CHILDREN
    if children then
        for i=1,#children do
            inspect_lexer(children[i],level+1)
        end
    end
    lexer._lexer = parent -- restore
end
-
-function context.inspect(lexer)
- inspect_lexer(lexer,0)
-end
-
--- An optional second argument has been introduced so that one can embed a lexer
--- more than once ... maybe something to look into (as not it's done by remembering
--- the start sequence ... quite okay but maybe suboptimal ... anyway, never change
--- a working solution).
-
--- namespace can be automatic: if parent then use name of parent (chain)
-
--- The original lexer framework had a rather messy user uinterface (e.g. moving
--- stuff from _rules to _RULES at some point but I could live with that. Now it uses
--- add_ helpers. But the subsystem is still not clean and pretty. Now, I can move to
--- the add_ but there is no gain in it so we support a mix which gives somewhat ugly
--- code. In fact, there should be proper subtables for this. I might actually do
--- this because we now always overload the normal lexer (parallel usage seems no
--- longer possible). For SciTE we can actually do a conceptual upgrade (more the
--- context way) because there is no further development there. That way we could
--- make even more advanced lexers.
-
-local savedrequire = require
-
-local escapes = {
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%(", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-function context.loadlexer(filename,namespace)
-
- if textadept then
- require = function(name)
- return savedrequire(name == "lexer" and "scite-context-lexer" or name)
- end
- end
-
- nesting = nesting + 1
- if not namespace then
- namespace = filename
- end
- local lexer = usedlexers[namespace] -- we load by filename but the internal name can be short
- if lexer then
- if trace then
- report("reusing lexer '%s'",namespace)
- end
- nesting = nesting - 1
- return lexer
- elseif trace then
- report("loading lexer '%s'",namespace)
- end
- --
- if initialize then
- initialize()
- end
- --
- parent_lexer = nil
- --
- lexer = load_lexer(filename,namespace) or nolexer(filename,namespace)
- usedlexers[filename] = lexer
- --
- if not lexer._rules and not lexer._lexer and not lexer_grammar then -- hmm should be lexer._grammar
- lexer._lexer = parent_lexer
- end
- --
- if lexer._lexer then
- local _l = lexer._lexer
- local _r = lexer._rules
- local _s = lexer._tokenstyles
- if not _l._tokenstyles then
- _l._tokenstyles = { }
- end
- if _r then
- local rules = _l._rules
- local name = lexer.name
- for i=1,#_r do
- local rule = _r[i]
- rules[#rules + 1] = {
- name .. "_" .. rule[1],
- rule[2],
- }
- end
- end
- if _s then
- local tokenstyles = _l._tokenstyles
- for token, style in next, _s do
- tokenstyles[token] = style
- end
- end
- lexer = _l
- end
- --
- local _r = lexer._rules
- local _g = lexer._grammar
- -- if _r or _g then
- if _r then
- local _s = lexer._tokenstyles
- if _s then
- for token, style in next, _s do
- add_style(lexer, token, style)
- end
- end
- if _r then
- for i=1,#_r do
- local rule = _r[i]
- add_rule(lexer, rule[1], rule[2])
- end
- end
- build_grammar(lexer)
- else
- -- other lexers
- build_grammar(lexer)
- end
- --
- add_style(lexer, lexer.whitespace, lexers.STYLE_WHITESPACE)
- --
- local foldsymbols = lexer._foldsymbols
- if foldsymbols then
- local patterns = foldsymbols._patterns
- if patterns then
- for i = 1, #patterns do
- patterns[i] = "()(" .. gsub(patterns[i],".",escapes) .. ")"
- end
- end
- end
- --
- lexer.lex = lexers.lex
- lexer.fold = lexers.fold
- --
- nesting = nesting - 1
- --
- if inspect then
- context.inspect(lexer)
- end
- --
- if textadept then
- require = savedrequire
- end
- --
- return lexer
-end
-
--- I probably need to check this occasionally with the original as I've messed around a bit
--- in the past to get nesting working well as one can hit the max number of styles, get
--- clashes due to fuzzy inheritance etc. so there is some interplay with the other patched
--- code.
-
-function context.embed_lexer(parent, child, start_rule, end_rule) -- mostly the same as the original
- local embeddedrules = child._EMBEDDEDRULES
- if not embeddedrules then
- embeddedrules = { }
- child._EMBEDDEDRULES = embeddedrules
- end
- if not child._RULES then
- local rules = child._rules
- if not rules then
- report("child lexer '%s' has no rules",child._NAME or "unknown")
- rules = { }
- child._rules = rules
- end
- for i=1,#rules do
- local rule = rules[i]
- add_rule(child, rule[1], rule[2])
- end
- end
- embeddedrules[parent._NAME] = {
- ["start_rule"] = start_rule,
- ["token_rule"] = join_tokens(child),
- ["end_rule"] = end_rule
- }
- local children = parent._CHILDREN
- if not children then
- children = { }
- parent._CHILDREN = children
- end
- children[#children + 1] = child
- local tokenstyles = parent._tokenstyles
- if not tokenstyles then
- tokenstyles = { }
- parent._tokenstyles = tokenstyles
- end
- local childname = child._NAME
- local whitespace = childname .. "_whitespace"
- tokenstyles[whitespace] = lexers.STYLE_WHITESPACE -- all these STYLE_THINGS will go .. just a proper hash
- if trace then
- report("using whitespace '%s' as trigger for '%s' with property '%s'",whitespace,childname,lexers.STYLE_WHITESPACE)
- end
- local childstyles = child._tokenstyles
- if childstyles then
- for token, style in next, childstyles do
- tokenstyles[token] = style
- end
- end
- -- new, a bit redone, untested, no clue yet what it is for
- local parentsymbols = parent._foldsymbols
- local childsymbols = child ._foldsymbols
- if not parentsymbols then
- parentsymbols = { }
- parent._foldsymbols = parentsymbols
- end
- if childsymbols then
- for token, symbols in next, childsymbols do
- local tokensymbols = parentsymbols[token]
- if not tokensymbols then
- tokensymbols = { }
- parentsymbols[token] = tokensymbols
- end
- for k, v in next, symbols do
- if type(k) == 'number' then
- tokensymbols[#tokensymbols + 1] = v
- elseif not tokensymbols[k] then
- tokensymbols[k] = v
- end
- end
- end
- end
- --
- child._lexer = parent
- parent_lexer = parent
-end
-
--- we now move the adapted code to the lexers namespace
-
-lexers.new = context.new
-lexers.load = context.loadlexer
-------.loadlexer = context.loadlexer
-lexers.loadluafile = context.loadluafile
-lexers.embed_lexer = context.embed_lexer
-lexers.fold = context.fold
-lexers.lex = context.lex
-lexers.token = context.token
-lexers.word_match = context.word_match
-lexers.exact_match = context.exact_match
-lexers.just_match = context.just_match
-lexers.inspect = context.inspect
-lexers.report = context.report
-lexers.inform = context.inform
-
--- helper .. alas ... in scite the lexer's lua instance is rather crippled .. not
--- even math is part of it
-
-do
-
- local floor = math and math.floor
- local char = string.char
- local format = format
- local tonumber = tonumber
-
- local function utfchar(n)
- if n < 0x80 then
- return char(n)
- elseif n < 0x800 then
- return char(
- 0xC0 + floor(n/0x40),
- 0x80 + (n % 0x40)
- )
- elseif n < 0x10000 then
- return char(
- 0xE0 + floor(n/0x1000),
- 0x80 + (floor(n/0x40) % 0x40),
- 0x80 + (n % 0x40)
- )
- elseif n < 0x40000 then
- return char(
- 0xF0 + floor(n/0x40000),
- 0x80 + floor(n/0x1000),
- 0x80 + (floor(n/0x40) % 0x40),
- 0x80 + (n % 0x40)
- )
- else
- -- return char(
- -- 0xF1 + floor(n/0x1000000),
- -- 0x80 + floor(n/0x40000),
- -- 0x80 + floor(n/0x1000),
- -- 0x80 + (floor(n/0x40) % 0x40),
- -- 0x80 + (n % 0x40)
- -- )
- return "?"
- end
- end
-
- context.utfchar = utfchar
-
- -- -- the next one is good enough for use here but not perfect (see context for a
- -- -- better one)
- --
- -- local function make(t)
- -- local p
- -- for k, v in next, t do
- -- if not p then
- -- if next(v) then
- -- p = P(k) * make(v)
- -- else
- -- p = P(k)
- -- end
- -- else
- -- if next(v) then
- -- p = p + P(k) * make(v)
- -- else
- -- p = p + P(k)
- -- end
- -- end
- -- end
- -- return p
- -- end
- --
- -- function lpeg.utfchartabletopattern(list)
- -- local tree = { }
- -- for i=1,#list do
- -- local t = tree
- -- for c in gmatch(list[i],".") do
- -- if not t[c] then
- -- t[c] = { }
- -- end
- -- t = t[c]
- -- end
- -- end
- -- return make(tree)
- -- end
-
- local utf8next = R("\128\191")
- local utf8one = R("\000\127")
- local utf8two = R("\194\223") * utf8next
- local utf8three = R("\224\239") * utf8next * utf8next
- local utf8four = R("\240\244") * utf8next * utf8next * utf8next
-
- local utfidentifier = utf8two + utf8three + utf8four
- helpers.utfidentifier = (R("AZ","az","__") + utfidentifier)
- * (R("AZ","az","__","09") + utfidentifier)^0
-
- helpers.utfcharpattern = P(1) * utf8next^0 -- unchecked but fast
- helpers.utfbytepattern = utf8one / byte
- + utf8two / function(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
- + utf8three / function(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
- + utf8four / function(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
- local p_false = P(false)
- local p_true = P(true)
-
- local function make(t)
- local function making(t)
- local p = p_false
- local keys = sortedkeys(t)
- for i=1,#keys do
- local k = keys[i]
- if k ~= "" then
- local v = t[k]
- if v == true then
- p = p + P(k) * p_true
- elseif v == false then
- -- can't happen
- else
- p = p + P(k) * making(v)
- end
- end
- end
- if t[""] then
- p = p + p_true
- end
- return p
- end
- local p = p_false
- local keys = sortedkeys(t)
- for i=1,#keys do
- local k = keys[i]
- if k ~= "" then
- local v = t[k]
- if v == true then
- p = p + P(k) * p_true
- elseif v == false then
- -- can't happen
- else
- p = p + P(k) * making(v)
- end
- end
- end
- return p
- end
-
- local function collapse(t,x)
- if type(t) ~= "table" then
- return t, x
- else
- local n = next(t)
- if n == nil then
- return t, x
- elseif next(t,n) == nil then
- -- one entry
- local k = n
- local v = t[k]
- if type(v) == "table" then
- return collapse(v,x..k)
- else
- return v, x .. k
- end
- else
- local tt = { }
- for k, v in next, t do
- local vv, kk = collapse(v,k)
- tt[kk] = vv
- end
- return tt, x
- end
- end
- end
-
- function helpers.utfchartabletopattern(list)
- local tree = { }
- local n = #list
- if n == 0 then
- for s in next, list do
- local t = tree
- local p, pk
- for c in gmatch(s,".") do
- if t == true then
- t = { [c] = true, [""] = true }
- p[pk] = t
- p = t
- t = false
- elseif t == false then
- t = { [c] = false }
- p[pk] = t
- p = t
- t = false
- else
- local tc = t[c]
- if not tc then
- tc = false
- t[c] = false
- end
- p = t
- t = tc
- end
- pk = c
- end
- if t == false then
- p[pk] = true
- elseif t == true then
- -- okay
- else
- t[""] = true
- end
- end
- else
- for i=1,n do
- local s = list[i]
- local t = tree
- local p, pk
- for c in gmatch(s,".") do
- if t == true then
- t = { [c] = true, [""] = true }
- p[pk] = t
- p = t
- t = false
- elseif t == false then
- t = { [c] = false }
- p[pk] = t
- p = t
- t = false
- else
- local tc = t[c]
- if not tc then
- tc = false
- t[c] = false
- end
- p = t
- t = tc
- end
- pk = c
- end
- if t == false then
- p[pk] = true
- elseif t == true then
- -- okay
- else
- t[""] = true
- end
- end
- end
- collapse(tree,"")
- -- inspect(tree)
- return make(tree)
- end
-
- patterns.invisibles = helpers.utfchartabletopattern {
- utfchar(0x00A0), -- nbsp
- utfchar(0x2000), -- enquad
- utfchar(0x2001), -- emquad
- utfchar(0x2002), -- enspace
- utfchar(0x2003), -- emspace
- utfchar(0x2004), -- threeperemspace
- utfchar(0x2005), -- fourperemspace
- utfchar(0x2006), -- sixperemspace
- utfchar(0x2007), -- figurespace
- utfchar(0x2008), -- punctuationspace
- utfchar(0x2009), -- breakablethinspace
- utfchar(0x200A), -- hairspace
- utfchar(0x200B), -- zerowidthspace
- utfchar(0x202F), -- narrownobreakspace
- utfchar(0x205F), -- math thinspace
- }
-
- -- now we can make:
-
- patterns.iwordtoken = patterns.wordtoken - patterns.invisibles
- patterns.iwordpattern = patterns.iwordtoken^3
-
-end
-
--- The following helpers are not used, partially replaced by other mechanisms and
--- when needed I'll first optimize them. I only made them somewhat more readable.
-
-function lexers.delimited_range(chars, single_line, no_escape, balanced) -- unchanged
- local s = sub(chars,1,1)
- local e = #chars == 2 and sub(chars,2,2) or s
- local range
- local b = balanced and s or ""
- local n = single_line and "\n" or ""
- if no_escape then
- local invalid = S(e .. n .. b)
- range = patterns.any - invalid
- else
- local invalid = S(e .. n .. b) + patterns.backslash
- range = patterns.any - invalid + patterns.backslash * patterns.any
- end
- if balanced and s ~= e then
- return P {
- s * (range + V(1))^0 * e
- }
- else
- return s * range^0 * P(e)^-1
- end
-end
-
-function lexers.starts_line(patt) -- unchanged
- return P ( function(input, index)
- if index == 1 then
- return index
- end
- local char = sub(input,index - 1,index - 1)
- if char == "\n" or char == "\r" or char == "\f" then
- return index
- end
- end ) * patt
-end
-
-function lexers.last_char_includes(s) -- unchanged
- s = "[" .. gsub(s,"[-%%%[]", "%%%1") .. "]"
- return P ( function(input, index)
- if index == 1 then
- return index
- end
- local i = index
- while match(sub(input,i - 1,i - 1),"[ \t\r\n\f]") do
- i = i - 1
- end
- if match(sub(input,i - 1,i - 1),s) then
- return index
- end
- end)
-end
-
-function lexers.nested_pair(start_chars, end_chars) -- unchanged
- local s = start_chars
- local e = P(end_chars)^-1
- return P {
- s * (patterns.any - s - end_chars + V(1))^0 * e
- }
-end
-
-local function prev_line_is_comment(prefix, text, pos, line, s) -- unchanged
- local start = find(line,"%S")
- if start < s and not find(line,prefix,start,true) then
- return false
- end
- local p = pos - 1
- if sub(text,p,p) == "\n" then
- p = p - 1
- if sub(text,p,p) == "\r" then
- p = p - 1
- end
- if sub(text,p,p) ~= "\n" then
- while p > 1 and sub(text,p - 1,p - 1) ~= "\n"
- do p = p - 1
- end
- while find(sub(text,p,p),"^[\t ]$") do
- p = p + 1
- end
- return sub(text,p,p + #prefix - 1) == prefix
- end
- end
- return false
-end
-
-local function next_line_is_comment(prefix, text, pos, line, s)
- local p = find(text,"\n",pos + s)
- if p then
- p = p + 1
- while find(sub(text,p,p),"^[\t ]$") do
- p = p + 1
- end
- return sub(text,p,p + #prefix - 1) == prefix
- end
- return false
-end
-
-function lexers.fold_line_comments(prefix)
- local property_int = lexers.property_int
- return function(text, pos, line, s)
- if property_int["fold.line.comments"] == 0 then
- return 0
- end
- if s > 1 and match(line,"^%s*()") < s then
- return 0
- end
- local prev_line_comment = prev_line_is_comment(prefix, text, pos, line, s)
- local next_line_comment = next_line_is_comment(prefix, text, pos, line, s)
- if not prev_line_comment and next_line_comment then
- return 1
- end
- if prev_line_comment and not next_line_comment then
- return -1
- end
- return 0
- end
-end
-
--- There are some fundamental changes in textadept version 10 and I don't want to
--- adapt again so we go the reverse route: map new to old. This is needed because
--- we need to load other lexers which is teh result of not being able to load the
--- lexer framework in parallel. Something happened in 10 that makes the main lexer
--- always enforced so now we need to really replace that one (and even then it loads
--- twice (i can probably sort that out). Maybe there's now some hard coded magic
--- in the binary.
-
-if textadept then
-
- -- Folds are still somewhat weak because of the end condition not being
- -- bound to a start .. probably to complex and it seems to work anyhow. As
- -- we have extended thinsg we just remap.
-
- local function add_fold_point(lexer,token_name,start_symbol,end_symbol)
- if type(start_symbol) == "string" then
- local foldsymbols = lexer._foldsymbols
- if not foldsymbols then
- foldsymbols = { }
- lexer._foldsymbols = foldsymbols
- end
- local patterns = foldsymbols._patterns
- if not patterns then
- patterns = { }
- usedpatt = { } -- > 10 uses a mixed index/hash (we don't use patterns)
- foldsymbols._patterns = patterns
- foldsymbols._usedpatt = usedpatt
- end
- local foldsymbol = foldsymbols[token_name]
- if not foldsymbol then
- foldsymbol = { }
- foldsymbols[token_name] = foldsymbol
- end
- if not usedpatt[start_symbol] then
- patterns[#patterns+1] = start_symbol
- usedpatt[start_symbol] = true
- end
- if type(end_symbol) == "string" then
- foldsymbol[start_symbol] = 1
- foldsymbol[end_symbol] = -1
- if not usedpatt[end_symbol] then
- patterns[#patterns+1] = end_symbol
- usedpatt[end_symbol] = true
- end
- else
- foldsymbol[start_symbol] = end_symbol
- end
- end
- end
-
- local function add_style(lexer,name,style)
- local tokenstyles = lexer._tokenstyles
- if not tokenstyles then
- tokenstyles = { }
- lexer._tokenstyles = tokenstyles
- end
- tokenstyles[name] = style
- end
-
- local function add_rule(lexer,id,rule)
- local rules = lexer._rules
- if not rules then
- rules = { }
- lexer._rules = rules
- end
- rules[#rules+1] = { id, rule }
- end
-
- local function modify_rule(lexer,id,rule) -- needed for textadept > 10
- if lexer._lexer then
- lexer = lexer._lexer
- end
- local RULES = lexer._RULES
- if RULES then
- RULES[id] = rule
- end
- end
-
- local function get_rule(lexer,id) -- needed for textadept > 10
- if lexer._lexer then
- lexer = lexer._lexer
- end
- local RULES = lexer._RULES
- if RULES then
- return RULES[id]
- end
- end
-
- local new = context.new
- local lmt = {
- __index = {
-
- add_rule = add_rule,
- modify_rule = modify_rule,
- get_rule = get_rule,
- add_style = add_style,
- add_fold_point = add_fold_point,
-
- join_tokens = join_tokens,
- build_grammar = build_grammar,
-
- embed = lexers.embed,
- lex = lexers.lex,
- fold = lexers.fold
-
- }
- }
-
- function lexers.new(name,options)
- local lexer = new(name)
- if options then
- lexer._LEXBYLINE = options['lex_by_line']
- lexer._FOLDBYINDENTATION = options['fold_by_indentation']
- lexer._CASEINSENSITIVEFOLDPOINTS = options['case_insensitive_fold_points']
- lexer._lexer = options['inherit']
- end
- setmetatable(lexer,lmt)
- return lexer
- end
-
-end
-
--- done
-
-return lexers
diff --git a/context/data/textadept/context/lexers/lexer.rme b/context/data/textadept/context/lexers/lexer.rme
deleted file mode 100644
index 5e9604f63..000000000
--- a/context/data/textadept/context/lexers/lexer.rme
+++ /dev/null
@@ -1 +0,0 @@
-We have no lexer.lua here!
diff --git a/context/data/textadept/context/lexers/scite-context-lexer-bibtex.lua b/context/data/textadept/context/lexers/scite-context-lexer-bibtex.lua
deleted file mode 100644
index b53da82ea..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer-bibtex.lua
+++ /dev/null
@@ -1,195 +0,0 @@
-local info = {
- version = 1.002,
- comment = "scintilla lpeg lexer for bibtex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-local global, string, table, lpeg = _G, string, table, lpeg
-local P, R, S, V = lpeg.P, lpeg.R, lpeg.S, lpeg.V
-local type = type
-
-local lexer = require("scite-context-lexer")
-local context = lexer.context
-local patterns = context.patterns
-
-local token = lexer.token
-local exact_match = lexer.exact_match
-
-local bibtexlexer = lexer.new("bib","scite-context-lexer-bibtex")
-local whitespace = bibtexlexer.whitespace
-
-local escape, left, right = P("\\"), P('{'), P('}')
-
-patterns.balanced = P {
- [1] = ((escape * (left+right)) + (1 - (left+right)) + V(2))^0,
- [2] = left * V(1) * right
-}
-
--- taken from bibl-bib.lua
-
-local anything = patterns.anything
-local percent = P("%")
-local start = P("@")
-local comma = P(",")
-local hash = P("#")
-local escape = P("\\")
-local single = P("'")
-local double = P('"')
-local left = P('{')
-local right = P('}')
-local lineending = S("\n\r")
-local space = S(" \t\n\r\f")
-local spaces = space^1
-local equal = P("=")
-
-local keyword = (R("az","AZ","09") + S("@_:-"))^1
------ s_quoted = ((escape*single) + spaces + (1-single))^0
------ d_quoted = ((escape*double) + spaces + (1-double))^0
-local s_quoted = ((escape*single) + (1-single))^0
-local d_quoted = ((escape*double) + (1-double))^0
-
-local balanced = patterns.balanced
-
-local t_spacing = token(whitespace, space^1)
-local t_optionalws = token("default", space^1)^0
-
-local t_equal = token("operator",equal)
-local t_left = token("grouping",left)
-local t_right = token("grouping",right)
-local t_comma = token("operator",comma)
-local t_hash = token("operator",hash)
-
-local t_s_value = token("operator",single)
- * token("text",s_quoted)
- * token("operator",single)
-local t_d_value = token("operator",double)
- * token("text",d_quoted)
- * token("operator",double)
-local t_b_value = token("operator",left)
- * token("text",balanced)
- * token("operator",right)
-local t_r_value = token("text",keyword)
-
-local t_keyword = token("keyword",keyword)
-local t_key = token("command",keyword)
-local t_label = token("warning",keyword)
-
-local t_somevalue = t_s_value + t_d_value + t_b_value + t_r_value
-local t_value = t_somevalue
- * ((t_optionalws * t_hash * t_optionalws) * t_somevalue)^0
-
-local t_assignment = t_optionalws
- * t_key
- * t_optionalws
- * t_equal
- * t_optionalws
- * t_value
-
-local t_shortcut = t_keyword
- * t_optionalws
- * t_left
- * t_optionalws
- * (t_assignment * t_comma^0)^0
- * t_optionalws
- * t_right
-
-local t_definition = t_keyword
- * t_optionalws
- * t_left
- * t_optionalws
- * t_label
- * t_optionalws
- * t_comma
- * (t_assignment * t_comma^0)^0
- * t_optionalws
- * t_right
-
-local t_comment = t_keyword
- * t_optionalws
- * t_left
- * token("text",(1-t_right)^0)
- * t_optionalws
- * t_right
-
-local t_forget = token("comment",percent^1 * (1-lineending)^0)
-
-local t_rest = token("default",anything)
-
--- this kind of lexing seems impossible as the size of the buffer passed to the lexer is not
--- large enough .. but we can cheat and use this:
---
--- function OnOpen(filename) editor:Colourise(1,editor.TextLength) end -- or is it 0?
-
--- somehow lexing fails on this more complex lexer when we insert something, there is no
--- backtracking to whitespace when we have no embedded lexer, so we fake one ... this works
--- to some extend but not in all cases (e.g. editing inside line fails) .. maybe i need to
--- patch the dll ... (better not)
-
-local dummylexer = lexer.load("scite-context-lexer-dummy","bib-dum")
-
-local dummystart = token("embedded",P("\001")) -- an unlikely to be used character
-local dummystop = token("embedded",P("\002")) -- an unlikely to be used character
-
-lexer.embed_lexer(bibtexlexer,dummylexer,dummystart,dummystop)
-
--- maybe we need to define each functional block as lexer (some 4) so i'll do that when
--- this issue is persistent ... maybe consider making a local lexer options (not load,
--- just lexer.new or so) .. or maybe do the reverse, embed the main one in a dummy child
-
-bibtexlexer._rules = {
- { "whitespace", t_spacing },
- { "forget", t_forget },
- { "shortcut", t_shortcut },
- { "definition", t_definition },
- { "comment", t_comment },
- { "rest", t_rest },
-}
-
--- local t_assignment = t_key
--- * t_optionalws
--- * t_equal
--- * t_optionalws
--- * t_value
---
--- local t_shortcut = t_keyword
--- * t_optionalws
--- * t_left
---
--- local t_definition = t_keyword
--- * t_optionalws
--- * t_left
--- * t_optionalws
--- * t_label
--- * t_optionalws
--- * t_comma
---
--- bibtexlexer._rules = {
--- { "whitespace", t_spacing },
--- { "assignment", t_assignment },
--- { "definition", t_definition },
--- { "shortcut", t_shortcut },
--- { "right", t_right },
--- { "comma", t_comma },
--- { "forget", t_forget },
--- { "comment", t_comment },
--- { "rest", t_rest },
--- }
-
-bibtexlexer._tokenstyles = context.styleset
-
-bibtexlexer._foldpattern = P("{") + P("}")
-
-bibtexlexer._foldsymbols = {
- _patterns = {
- "{",
- "}",
- },
- ["grouping"] = {
- ["{"] = 1,
- ["}"] = -1,
- },
-}
-
-return bibtexlexer
diff --git a/context/data/textadept/context/lexers/scite-context-lexer-bidi.lua b/context/data/textadept/context/lexers/scite-context-lexer-bidi.lua
deleted file mode 100644
index ea9c56712..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer-bidi.lua
+++ /dev/null
@@ -1,598 +0,0 @@
-local info = {
- version = 1.002,
- comment = "scintilla lpeg lexer for plain text (with spell checking)",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-local P, S, Cmt, Cp = lpeg.P, lpeg.S, lpeg.Cmt, lpeg.Cp
-local find, match = string.find, string.match
-
-local lexer = require("scite-context-lexer")
-local context = lexer.context
-local patterns = context.patterns
-
-local token = lexer.token
-
-local bidilexer = lexer.new("bidi","scite-context-lexer-bidi")
-local whitespace = bidilexer.whitespace
-
-local space = patterns.space
-local any = patterns.any
-
--- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
--- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
-
-require("char-def")
-
-characters.directions = { }
-
-setmetatable(characters.directions,{ __index = function(t,k)
- local d = data[k]
- if d then
- local v = d.direction
- if v then
- t[k] = v
- return v
- end
- end
- t[k] = false -- maybe 'l'
- return false
-end })
-
-characters.mirrors = { }
-
-setmetatable(characters.mirrors,{ __index = function(t,k)
- local d = data[k]
- if d then
- local v = d.mirror
- if v then
- t[k] = v
- return v
- end
- end
- t[k] = false
- return false
-end })
-
-characters.textclasses = { }
-
-setmetatable(characters.textclasses,{ __index = function(t,k)
- local d = data[k]
- if d then
- local v = d.textclass
- if v then
- t[k] = v
- return v
- end
- end
- t[k] = false
- return false
-end })
-
-local directiondata = characters.directions
-local mirrordata = characters.mirrors
-local textclassdata = characters.textclasses
-
-local maximum_stack = 0xFF -- unicode: 60, will be jumped to 125, we don't care too much
-local analyze_fences = false
-
-local whitespace = {
- lre = true,
- rle = true,
- lro = true,
- rlo = true,
- pdf = true,
- bn = true,
- ws = true,
-}
-
-local b_s_ws_on = {
- b = true,
- s = true,
- ws = true,
- on = true
-}
-
-local mt_space = { __index = { char = 0x0020, direction = "ws", original = "ws", level = 0 } }
-local mt_lre = { __index = { char = 0x202A, direction = "lre", original = "lre", level = 0 } }
-local mt_rle = { __index = { char = 0x202B, direction = "rle", original = "rle", level = 0 } }
-local mt_pdf = { __index = { char = 0x202C, direction = "pdf", original = "pdf", level = 0 } }
-local mt_object = { __index = { char = 0xFFFC, direction = "on", original = "on", level = 0 } }
-
-local list = { }
-local stack = { }
-
-setmetatable(stack, { __index = function(t,k) local v = { } t[k] = v return v end })
-
-local function build_list(head)
- -- P1
- local size = 0
- lpegmatch(pattern,head)
- return list, size
-end
-
-local function resolve_fences(list,size,start,limit)
- -- N0: funny effects, not always better, so it's an option
- local nofstack = 0
- for i=start,limit do
- local entry = list[i]
- if entry.direction == "on" then
- local char = entry.char
- local mirror = mirrordata[char]
- if mirror then
- local class = textclassdata[char]
- entry.mirror = mirror
- entry.class = class
- if class == "open" then
- nofstack = nofstack + 1
- local stacktop = stack[nofstack]
- stacktop[1] = mirror
- stacktop[2] = i
- stacktop[3] = false -- not used
- elseif nofstack == 0 then
- -- skip
- elseif class == "close" then
- while nofstack > 0 do
- local stacktop = stack[nofstack]
- if stacktop[1] == char then
- local open = stacktop[2]
- local close = i
- list[open ].paired = close
- list[close].paired = open
- break
- else
- -- do we mirror or not
- end
- nofstack = nofstack - 1
- end
- end
- end
- end
- end
-end
-
-local function get_baselevel(list,size,direction)
- if direction == "TRT" then
- return 1, "TRT", true
- elseif direction == "TLT" then
- return 0, "TLT", true
- end
- -- P2, P3:
- for i=1,size do
- local entry = list[i]
- local direction = entry.direction
- if direction == "r" or direction == "al" then -- and an ?
- return 1, "TRT", true
- elseif direction == "l" then
- return 0, "TLT", true
- end
- end
- return 0, "TLT", false
-end
-
-local function resolve_explicit(list,size,baselevel)
--- if list.rle or list.lre or list.rlo or list.lro then
- -- X1
- local level = baselevel
- local override = "on"
- local nofstack = 0
- for i=1,size do
- local entry = list[i]
- local direction = entry.direction
- -- X2
- if direction == "rle" then
- if nofstack < maximum_stack then
- nofstack = nofstack + 1
- local stacktop = stack[nofstack]
- stacktop[1] = level
- stacktop[2] = override
- level = level + (level % 2 == 1 and 2 or 1) -- least_greater_odd(level)
- override = "on"
- entry.level = level
- entry.direction = "bn"
- entry.remove = true
- end
- -- X3
- elseif direction == "lre" then
- if nofstack < maximum_stack then
- nofstack = nofstack + 1
- local stacktop = stack[nofstack]
- stacktop[1] = level
- stacktop[2] = override
- level = level + (level % 2 == 1 and 1 or 2) -- least_greater_even(level)
- override = "on"
- entry.level = level
- entry.direction = "bn"
- entry.remove = true
- end
- -- X4
- elseif direction == "rlo" then
- if nofstack < maximum_stack then
- nofstack = nofstack + 1
- local stacktop = stack[nofstack]
- stacktop[1] = level
- stacktop[2] = override
- level = level + (level % 2 == 1 and 2 or 1) -- least_greater_odd(level)
- override = "r"
- entry.level = level
- entry.direction = "bn"
- entry.remove = true
- end
- -- X5
- elseif direction == "lro" then
- if nofstack < maximum_stack then
- nofstack = nofstack + 1
- local stacktop = stack[nofstack]
- stacktop[1] = level
- stacktop[2] = override
- level = level + (level % 2 == 1 and 1 or 2) -- least_greater_even(level)
- override = "l"
- entry.level = level
- entry.direction = "bn"
- entry.remove = true
- end
- -- X7
- elseif direction == "pdf" then
- if nofstack < maximum_stack then
- local stacktop = stack[nofstack]
- level = stacktop[1]
- override = stacktop[2]
- nofstack = nofstack - 1
- entry.level = level
- entry.direction = "bn"
- entry.remove = true
- end
- -- X6
- else
- entry.level = level
- if override ~= "on" then
- entry.direction = override
- end
- end
- end
--- else
--- for i=1,size do
--- list[i].level = baselevel
--- end
--- end
- -- X8 (reset states and overrides after paragraph)
-end
-
local function resolve_weak(list,size,start,limit,orderbefore,orderafter)
    -- Resolve the weak bidi types (UAX #9 rules W1..W7) on the level run
    -- list[start..limit], mutating entry.direction in place.  orderbefore
    -- and orderafter are the boundary (sor/eor) directions of the run.
    -- W1: non spacing marks get the direction of the previous character
    for i=start,limit do
        local entry = list[i]
        if entry.direction == "nsm" then
            if i == start then
                entry.direction = orderbefore
            else
                entry.direction = list[i-1].direction
            end
        end
    end
    -- W2: a european number becomes arabic when the last strong type
    -- before it is an arabic letter
    for i=start,limit do
        local entry = list[i]
        if entry.direction == "en" then
            for j=i-1,start,-1 do
                local prev = list[j]
                local direction = prev.direction
                if direction == "al" then
                    entry.direction = "an"
                    break
                elseif direction == "r" or direction == "l" then
                    break
                end
            end
        end
    end
    -- W3: arabic letters count as right-to-left
    for i=start,limit do
        local entry = list[i]
        if entry.direction == "al" then
            entry.direction = "r"
        end
    end
    -- W4: a single separator between two numbers of the same kind becomes
    -- that kind.  Fixed: the original windowed loop referenced an undefined
    -- 'current' (so the examined entry never advanced and a global leaked)
    -- and re-read list[runner] before incrementing runner (off by one).
    local runner = start + 2
    local before = list[start]
    local current = list[start + 1]
    local after = list[runner]
    while after do
        local direction = current.direction
        if direction == "es" then
            if before.direction == "en" and after.direction == "en" then
                current.direction = "en"
            end
        elseif direction == "cs" then
            local prevdirection = before.direction
            if prevdirection == "en" then
                if after.direction == "en" then
                    current.direction = "en"
                end
            elseif prevdirection == "an" and after.direction == "an" then
                current.direction = "an"
            end
        end
        before = current
        current = after
        runner = runner + 1
        after = list[runner]
    end
    -- W5: a run of terminators adjacent to a european number becomes en.
    -- Fixed: the run direction at the start of the sequence used the
    -- undefined 'sor'; the boundary direction is orderbefore here.
    local i = start
    while i <= limit do
        if list[i].direction == "et" then
            local runstart = i
            local runlimit = runstart
            for j=runstart,limit do
                if list[j].direction == "et" then
                    runlimit = j
                else
                    break
                end
            end
            local rundirection = runstart == start and orderbefore or list[runstart-1].direction
            if rundirection ~= "en" then
                rundirection = runlimit == limit and orderafter or list[runlimit+1].direction
            end
            if rundirection == "en" then
                for j=runstart,runlimit do
                    list[j].direction = "en"
                end
            end
            i = runlimit
        end
        i = i + 1
    end
    -- W6: leftover separators and terminators become neutral
    for i=start,limit do
        local entry = list[i]
        local direction = entry.direction
        if direction == "es" or direction == "et" or direction == "cs" then
            entry.direction = "on"
        end
    end
    -- W7: a european number becomes l when the last strong type before it is l
    for i=start,limit do
        local entry = list[i]
        if entry.direction == "en" then
            local prev_strong = orderbefore
            for j=i-1,start,-1 do
                local direction = list[j].direction
                if direction == "l" or direction == "r" then
                    prev_strong = direction
                    break
                end
            end
            if prev_strong == "l" then
                entry.direction = "l"
            end
        end
    end
end
-
local function resolve_neutral(list,size,start,limit,orderbefore,orderafter)
    -- N1, N2: runs of neutral types (b/s/ws/on, looked up via b_s_ws_on)
    -- take the surrounding direction when both sides agree (N1), else the
    -- embedding direction of the entry (N2).  Mutates entry.direction.
    -- Fixed: the original used a numeric for loop and assigned to the
    -- loop variable ('i = runlimit'); in Lua that has no effect on the
    -- iteration, so each member of a resolved run was rescanned.  A while
    -- loop (as in rule W5 above) skips the run correctly.
    local i = start
    while i <= limit do
        local entry = list[i]
        if b_s_ws_on[entry.direction] then
            local leading_direction, trailing_direction, resolved_direction
            local runstart = i
            local runlimit = runstart
            for j=runstart+1,limit do
                if b_s_ws_on[list[j].direction] then
                    runlimit = j
                else
                    break
                end
            end
            if runstart == start then
                leading_direction = orderbefore
            else
                leading_direction = list[runstart-1].direction
                -- numbers act as r for the purpose of neutral resolution
                if leading_direction == "en" or leading_direction == "an" then
                    leading_direction = "r"
                end
            end
            if runlimit == limit then
                trailing_direction = orderafter
            else
                trailing_direction = list[runlimit+1].direction
                if trailing_direction == "en" or trailing_direction == "an" then
                    trailing_direction = "r"
                end
            end
            if leading_direction == trailing_direction then
                -- N1
                resolved_direction = leading_direction
            else
                -- N2: fall back to the embedding direction
                resolved_direction = entry.level % 2 == 1 and "r" or "l"
            end
            for j=runstart,runlimit do
                list[j].direction = resolved_direction
            end
            i = runlimit
        end
        i = i + 1
    end
end
-
local function resolve_implicit(list,size,start,limit,orderbefore,orderafter,baselevel)
    -- I1/I2: bump the embedding level of each entry in the run according
    -- to its resolved direction.  Mutates entry.level in place.
    for index=start,limit do
        local item = list[index]
        local level = item.level
        local direction = item.direction
        if level % 2 == 0 then
            -- I1: on an even (ltr) level, r goes up one, numbers up two
            if direction == "r" then
                item.level = level + 1
            elseif direction == "an" or direction == "en" then
                item.level = level + 2
            end
        else
            -- I2: on an odd (rtl) level, l and numbers go up one
            if direction == "l" or direction == "en" or direction == "an" then
                item.level = level + 1
            end
        end
    end
end
-
local function resolve_levels(list,size,baselevel,analyze_fences)
    -- Resolve the final embedding levels for the whole paragraph:
    -- split the list into same-level runs, apply the weak (W1..W7),
    -- fence (N0, optional), neutral (N1..N2) and implicit (I1..I2)
    -- rules per run, then do the L1/L4 post processing.  Mutates the
    -- entries in place.  resolve_weak/resolve_fences/resolve_neutral/
    -- resolve_implicit, whitespace and mirrordata come from earlier in
    -- this file.
    -- X10: iterate over the level runs
    local start = 1
    while start < size do
        local level = list[start].level
        local limit = start + 1
        while limit < size and list[limit].level == level do
            limit = limit + 1
        end
        -- boundary (sor/eor) direction: the higher of the two adjacent
        -- levels decides (odd -> r, even -> l)
        local prev_level = start == 1 and baselevel or list[start-1].level
        -- NOTE(review): list[limit+1] looks one past the first entry of
        -- the next run; verify this index against the reference bidi code
        local next_level = limit == size and baselevel or list[limit+1].level
        local orderbefore = (level > prev_level and level or prev_level) % 2 == 1 and "r" or "l"
        local orderafter = (level > next_level and level or next_level) % 2 == 1 and "r" or "l"
        -- W1 .. W7
        resolve_weak(list,size,start,limit,orderbefore,orderafter)
        -- N0 (bracket pairs, only when fence analysis is requested)
        if analyze_fences then
            resolve_fences(list,size,start,limit)
        end
        -- N1 .. N2
        resolve_neutral(list,size,start,limit,orderbefore,orderafter)
        -- I1 .. I2
        resolve_implicit(list,size,start,limit,orderbefore,orderafter,baselevel)
        start = limit
    end
    -- L1: separators (and the whitespace that precedes them) revert to
    -- the paragraph's base level; uses the original (unresolved) class
    for i=1,size do
        local entry = list[i]
        local direction = entry.original
        -- (1) segment and paragraph separators
        if direction == "s" or direction == "b" then
            entry.level = baselevel
            -- (2) any run of whitespace right before the separator
            for j=i-1,1,-1 do
                local entry = list[j]
                if whitespace[entry.original] then
                    entry.level = baselevel
                else
                    break
                end
            end
        end
    end
    -- (3) trailing whitespace of the paragraph reverts as well
    for i=size,1,-1 do
        local entry = list[i]
        if whitespace[entry.original] then
            entry.level = baselevel
        else
            break
        end
    end
    -- L4: mirroring only applies in rtl (odd level) context; with fence
    -- analysis a mirror is dropped unless the fence was actually paired,
    -- otherwise the mirror glyph is looked up in mirrordata
    if analyze_fences then
        for i=1,size do
            local entry = list[i]
            if entry.level % 2 == 1 then -- odd(entry.level)
                if entry.mirror and not entry.paired then
                    entry.mirror = false
                end
                -- okay
            elseif entry.mirror then
                entry.mirror = false
            end
        end
    else
        for i=1,size do
            local entry = list[i]
            if entry.level % 2 == 1 then -- odd(entry.level)
                local mirror = mirrordata[entry.char]
                if mirror then
                    entry.mirror = mirror
                end
            end
        end
    end
end
-
-- running position while emitting tokens; reset by process()
local index = 1

local function process(head,direction)
    -- Analyze one paragraph: build the entry list, pick the base level
    -- and resolve all bidi levels.  Returns the list and its size.
    -- NOTE(review): build_list/get_baselevel/resolve_explicit and the
    -- analyze_fences flag come from earlier in this file (not visible here).
    local list, size = build_list(head)
    local baselevel = get_baselevel(list,size,direction) -- we always have an inline dir node in context
    resolve_explicit(list,size,baselevel)
    resolve_levels(list,size,baselevel,analyze_fences)
    index = 1
    return list, size
end
-
--- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
--- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
-
local utf = lexer.helpers.utfbytepattern

-- local t_start = token("default", utf, function(s,i) if i == 1 then index = 1 process(s) end end))
-- local t_bidi  = token("error", utf / function() index = index + 1 return list[index].direction == "r" end)
-- local t_rest  = token("default", any)

-- bidilexer._rules = {
--     { "start", t_start },
--     { "bidi",  t_bidi },
--     { "rest",  t_rest },
-- }

bidilexer._grammar = #utf * function(s,i)
    -- Fixed: the original read the global 'size', which is nil here and
    -- would make the numeric for loop error; process() returns the
    -- resolved list together with its size.
    local list, size = process(s)
    -- build a flat position/style capture list: every slot gets the
    -- error style (experimental whole-buffer grammar)
    local t = { }
    local n = 0
    for i=1,size do
        n = n + 1 t[n] = i
        n = n + 1 t[n] = "error"
    end
    return t
end

bidilexer._tokenstyles = context.styleset

return bidilexer
diff --git a/context/data/textadept/context/lexers/scite-context-lexer-bnf.lua b/context/data/textadept/context/lexers/scite-context-lexer-bnf.lua
deleted file mode 100644
index ce57642ba..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer-bnf.lua
+++ /dev/null
@@ -1,99 +0,0 @@
local info = {
    version = 1.001,
    comment = "scintilla lpeg lexer for bnf",
    author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license = "see context related readme files",
}

-- will replace the one in metafun

local global, lpeg = _G, lpeg
local P, R, S = lpeg.P, lpeg.R, lpeg.S

local lexer = require("scite-context-lexer")
local context = lexer.context
local patterns = context.patterns

local token = lexer.token
local exact_match = lexer.exact_match

local bnflexer = lexer.new("bnf","scite-context-lexer-bnf")
local whitespace = bnflexer.whitespace

-- from wikipedia:
--
-- <syntax>         ::= <rule> | <rule> <syntax>
-- <rule>           ::= <opt-whitespace> "<" <rule-name> ">" <opt-whitespace> "::=" <opt-whitespace> <expression> <line-end>
-- <opt-whitespace> ::= " " <opt-whitespace> | ""
-- <expression>     ::= <list> | <list> <opt-whitespace> "|" <opt-whitespace> <expression>
-- <line-end>       ::= <opt-whitespace> <EOL> | <line-end> <line-end>
-- <list>           ::= <term> | <term> <opt-whitespace> <list>
-- <term>           ::= <literal> | "<" <rule-name> ">"
-- <literal>        ::= '"' <text1> '"' | "'" <text2> "'"
-- <text1>          ::= "" | <character1> <text1>
-- <text2>          ::= "" | <character2> <text2>
-- <character>      ::= <letter> | <digit> | <symbol>
-- <letter>         ::= "A" .. "Z" | "a" .. "z"
-- <digit>          ::= "0" .. "9"
-- <symbol>         ::= "|" | " " | "-" | "!" | "#" | "$" | "%" | "&" | "(" | ")" | "*" | "+" | "," | "-" | "." | "/" | ":" | ";" | ">" | "=" | "<" | "?" | "@" | "[" | "\" | "]" | "^" | "_" | "`" | "{" | "}" | "~"
-- <character1>     ::= <character> | "'"
-- <character2>     ::= <character> | '"'
-- <rule-name>      ::= <letter> | <rule-name> <rule-char>
-- <rule-char>      ::= <letter> | <digit> | "-"

local anything = patterns.anything
local separator = P("|")
local left = P("<")
local right = P(">")
local space = S(" \t\n\r\f")
local spaces = space^1
local letter = R("AZ","az")
local digit = R("09")
local symbol = S([[| -!#$%&()*+,-./:;>=<?@[\]^_`{}~]])
-- fixed: the original '(letter + digit + symbol^0)' matched at most a
-- single letter or digit (or only a run of symbols), so quoted literals
-- were cut short; per the grammar a literal body is a run of any of the
-- three character classes (possibly empty)
local text = (letter + digit + symbol)^0
local name = letter * (letter + digit + P("-"))^0
local becomes = P("::=")
local extra = P("|")
local single = P("'")
local double = P('"')

local t_spacing = token(whitespace,space^1)
-- <rule-name> between angle brackets
local t_term = token("command",left)
             * token("text",name)
             * token("command",right)
-- a quoted literal, either single or double quoted
local t_text = token("quote",single)
             * token("text",text)
             * token("quote",single)
             + token("quote",double)
             * token("text",text)
             * token("quote",double)
local t_becomes = token("operator",becomes)
local t_extra = token("extra",extra)
local t_rest = token("default",anything)

bnflexer._rules = {
    { "whitespace", t_spacing },
    { "term", t_term },
    { "text", t_text },
    { "becomes", t_becomes },
    { "extra", t_extra },
    { "rest", t_rest },
}

bnflexer._tokenstyles = context.styleset

bnflexer._foldpattern = left + right

bnflexer._foldsymbols = {
    _patterns = {
        "<",
        ">",
    },
    ["grouping"] = {
        ["<"] = 1,
        [">"] = -1,
    },
}

return bnflexer
diff --git a/context/data/textadept/context/lexers/scite-context-lexer-cld.lua b/context/data/textadept/context/lexers/scite-context-lexer-cld.lua
deleted file mode 100644
index 7bda7800e..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer-cld.lua
+++ /dev/null
@@ -1,23 +0,0 @@
local info = {
    version = 1.002,
    comment = "scintilla lpeg lexer for cld",
    author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license = "see context related readme files",
}

local lexer = require("scite-context-lexer")
local context = lexer.context
local patterns = context.patterns

local cldlexer = lexer.new("cld","scite-context-lexer-cld")
local lualexer = lexer.load("scite-context-lexer-lua")

-- The cld lexer is a thin shell around the lua lexer: it just picks up
-- the fields that the lua lexer publishes for cld use (a bit of a hack,
-- can probably be done nicer now).

local inherited = {
    _rules       = "_rules_cld",
    _tokenstyles = "_tokenstyles",
    _foldsymbols = "_foldsymbols",
    _directives  = "_directives",
}

for target, source in next, inherited do
    cldlexer[target] = lualexer[source]
end

return cldlexer
diff --git a/context/data/textadept/context/lexers/scite-context-lexer-cpp-web.lua b/context/data/textadept/context/lexers/scite-context-lexer-cpp-web.lua
deleted file mode 100644
index 631a802fe..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer-cpp-web.lua
+++ /dev/null
@@ -1,23 +0,0 @@
local info = {
    version = 1.002,
    comment = "scintilla lpeg lexer for cpp web",
    author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license = "see context related readme files",
}

local lexer = require("scite-context-lexer")
local context = lexer.context
local patterns = context.patterns

local cppweblexer = lexer.new("cpp-web","scite-context-lexer-cpp")
local cpplexer = lexer.load("scite-context-lexer-cpp")

-- The cpp-web lexer is a thin shell around the cpp lexer: it reuses the
-- web-enabled rule set that the cpp lexer publishes (a bit of a hack,
-- can probably be done nicer now).

local inherited = {
    _rules       = "_rules_web",
    _tokenstyles = "_tokenstyles",
    _foldsymbols = "_foldsymbols",
    _directives  = "_directives",
}

for target, source in next, inherited do
    cppweblexer[target] = cpplexer[source]
end

return cppweblexer
diff --git a/context/data/textadept/context/lexers/scite-context-lexer-cpp.lua b/context/data/textadept/context/lexers/scite-context-lexer-cpp.lua
deleted file mode 100644
index a50cdaa17..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer-cpp.lua
+++ /dev/null
@@ -1,199 +0,0 @@
local info = {
    version = 1.002,
    comment = "scintilla lpeg lexer for cpp",
    author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license = "see context related readme files",
}

-- looks like the original cpp lexer but web ready (so nothing special here yet)

local P, R, S = lpeg.P, lpeg.R, lpeg.S

local lexer = require("scite-context-lexer")
local context = lexer.context
local patterns = context.patterns

local token = lexer.token
local exact_match = lexer.exact_match

local cpplexer = lexer.new("cpp","scite-context-lexer-cpp")
local whitespace = cpplexer.whitespace

local keywords = { -- copied from cpp.lua
    -- c
    "asm", "auto", "break", "case", "const", "continue", "default", "do", "else",
    "extern", "false", "for", "goto", "if", "inline", "register", "return",
    "sizeof", "static", "switch", "true", "typedef", "volatile", "while",
    "restrict",
    -- hm
    "_Bool", "_Complex", "_Pragma", "_Imaginary",
    "boolean",
    -- c++.
    "catch", "class", "const_cast", "delete", "dynamic_cast", "explicit",
    "export", "friend", "mutable", "namespace", "new", "operator", "private",
    "protected", "public", "signals", "slots", "reinterpret_cast",
    "static_assert", "static_cast", "template", "this", "throw", "try", "typeid",
    "typename", "using", "virtual"
}

local datatypes = { -- copied from cpp.lua
    "bool", "char", "double", "enum", "float", "int", "long", "short", "signed",
    "struct", "union", "unsigned", "void"
}

local macros = { -- copied from cpp.lua
    "define", "elif", "else", "endif", "error", "if", "ifdef", "ifndef", "import",
    "include", "line", "pragma", "undef", "using", "warning"
}

local luatexs = { -- luatex specific type names, styled as commands
    "word", "halfword", "quarterword", "scaledwhd", "scaled", "pointer", "glueratio", "strnumber",
    "dumpstream", "memoryword",
}

local space = patterns.space -- S(" \n\r\t\f\v")
local any = patterns.any
local restofline = patterns.restofline
local startofline = patterns.startofline

local squote = P("'")
local dquote = P('"')
local period = P(".")
local escaped = P("\\") * P(1)
local slashes = P("//")
local begincomment = P("/*")
local endcomment = P("*/")
local percent = P("%")

local hexadecimal = patterns.hexadecimal
local decimal = patterns.decimal
local float = patterns.float
local integer = P("-")^-1 * (hexadecimal + decimal) -- also in patterns ?

local spacing = token(whitespace, space^1)
local rest = token("default", any)

local shortcomment = token("comment", slashes * restofline^0)
local longcomment = token("comment", begincomment * (1-endcomment)^0 * endcomment^-1)

-- double or single quoted strings with backslash escapes
local shortstring = token("quote", dquote)
                  * token("string", (escaped + (1-dquote))^0)
                  * token("quote", dquote)
                  + token("quote", squote)
                  * token("string", (escaped + (1-squote))^0)
                  * token("quote", squote)

local number = token("number", float + integer)

local validword = R("AZ","az","__") * R("AZ","az","__","09")^0

local operator = token("special", S("+-*/%^!=<>;:{}[]().&|?~"))

----- optionalspace = spacing^0

local p_keywords = exact_match(keywords)
local p_datatypes = exact_match(datatypes)
local p_macros = exact_match(macros)
local p_luatexs = exact_match(luatexs)

local keyword = token("keyword", p_keywords)
local datatype = token("keyword", p_datatypes)
local identifier = token("default", validword) -- fixed: was defined twice with identical bodies
local luatex = token("command", p_luatexs)

-- a preprocessor line: '#' at the start of a line followed by a known macro name
local macro = token("data", #P("#") * startofline * P("#") * S("\t ")^0 * p_macros)

cpplexer._rules = {
    { "whitespace", spacing },
    { "keyword", keyword },
    { "type", datatype },
    { "luatex", luatex },
    { "identifier", identifier },
    { "string", shortstring },
    { "longcomment", longcomment },
    { "shortcomment", shortcomment },
    { "number", number },
    { "macro", macro },
    { "operator", operator },
    { "rest", rest },
}

local web = lexer.loadluafile("scite-context-lexer-web-snippets")

if web then

    lexer.inform("supporting web snippets in cpp lexer")

    cpplexer._rules_web = {
        { "whitespace", spacing },
        { "keyword", keyword },
        { "type", datatype },
        { "luatex", luatex },
        { "identifier", identifier },
        { "string", shortstring },
        { "longcomment", longcomment },
        { "shortcomment", shortcomment },
        { "web", web.pattern },
        { "number", number },
        { "macro", macro },
        { "operator", operator },
        { "rest", rest },
    }

else

    lexer.report("not supporting web snippets in cpp lexer")

    cpplexer._rules_web = {
        { "whitespace", spacing },
        { "keyword", keyword },
        { "type", datatype },
        { "luatex", luatex },
        { "identifier", identifier },
        { "string", shortstring },
        { "longcomment", longcomment },
        { "shortcomment", shortcomment },
        { "number", number },
        { "macro", macro },
        { "operator", operator },
        { "rest", rest },
    }

end

cpplexer._tokenstyles = context.styleset

cpplexer._foldpattern = P("/*") + P("*/") + S("{}") -- separate entry else interference (singular?)

cpplexer._foldsymbols = {
    _patterns = {
        "[{}]",
        "/%*",
        "%*/",
    },
    -- ["data"] = { -- macro
    --     ["region"] = 1,
    --     ["endregion"] = -1,
    --     ["if"] = 1,
    --     ["ifdef"] = 1,
    --     ["ifndef"] = 1,
    --     ["endif"] = -1,
    -- },
    ["special"] = { -- operator
        ["{"] = 1,
        ["}"] = -1,
    },
    ["comment"] = {
        ["/*"] = 1,
        ["*/"] = -1,
    }
}

-- -- by indentation:

-- fixed: the original cleared the misspelled field '_foldpatterns' (plural),
-- which left the '_foldpattern' assignment above still active while
-- '_foldsymbols' was cleared; disable both consistently
cpplexer._foldpattern = nil
cpplexer._foldsymbols = nil

return cpplexer
diff --git a/context/data/textadept/context/lexers/scite-context-lexer-dummy.lua b/context/data/textadept/context/lexers/scite-context-lexer-dummy.lua
deleted file mode 100644
index 5d3096b7d..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer-dummy.lua
+++ /dev/null
@@ -1,35 +0,0 @@
local info = {
    version = 1.002,
    comment = "scintilla lpeg lexer that triggers whitespace backtracking",
    author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license = "see context related readme files",
}

-- The lexer dll doesn't backtrack when there is no embedded lexer, so this
-- minimal lexer exists only to trigger that (used for instance by the bibtex
-- lexer); even then we can get failed lexing.

local lexer = require("scite-context-lexer")
local context = lexer.context
local patterns = context.patterns

local token = lexer.token

local dummylexer = lexer.new("dummy","scite-context-lexer-dummy")
local whitespace = dummylexer.whitespace

local blank = patterns.space
local filled = 1 - blank

dummylexer._rules = {
    { "whitespace", token(whitespace, blank^1) },
    { "rest", token("default", filled^1) },
}

dummylexer._tokenstyles = context.styleset

return dummylexer
diff --git a/context/data/textadept/context/lexers/scite-context-lexer-json.lua b/context/data/textadept/context/lexers/scite-context-lexer-json.lua
deleted file mode 100644
index ca7add07d..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer-json.lua
+++ /dev/null
@@ -1,101 +0,0 @@
local info = {
    version = 1.002,
    comment = "scintilla lpeg lexer for json",
    author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license = "see context related readme files",
}

local global, string, table, lpeg = _G, string, table, lpeg
local P, R, S, V = lpeg.P, lpeg.R, lpeg.S, lpeg.V
local type = type

local lexer = require("scite-context-lexer")
local context = lexer.context
local patterns = context.patterns

local token = lexer.token
local exact_match = lexer.exact_match

local jsonlexer = lexer.new("json","scite-context-lexer-json")
local whitespace = jsonlexer.whitespace

local anything = patterns.anything
local comma = P(",")
local colon = P(":")
local escape = P("\\")
----- single = P("'")
local double = P('"')
local openarray = P('[')
local closearray = P(']')
local openhash = P('{')
local closehash = P('}')
----- lineending = S("\n\r")
local space = S(" \t\n\r\f")
local spaces = space^1
local operator = S(':,{}[]')
local fence = openarray + closearray + openhash + closehash

-- fixed: hexadecimal ranges need lpeg.R -- lpeg.S("09","AF","af") only built
-- the two-character set {'0','9'} (extra arguments are ignored) -- and a
-- json \u escape takes exactly four hex digits; note that escape_un is
-- still unused in 'content' (kept as in the original)
local hexdigit = R("09","AF","af")
local escape_un = P("\\u") * hexdigit * hexdigit * hexdigit * hexdigit
local escape_bs = P("\\") * P(1)
----- content = (escape_un + escape_bs + (1-double))^0
local content = (escape_bs + (1-double))^0

local reserved = P("true")
               + P("false")
               + P("null")

local integer = P("-")^-1 * (patterns.hexadecimal + patterns.decimal)
local float = patterns.float

-- a number, with a trailing identifier flagged as an error
local t_number = token("number", float + integer)
               * (token("error",R("AZ","az","__")^1))^0

local t_spacing = token(whitespace, space^1)
local t_optionalws = token("default", space^1)^0

local t_operator = token("special", operator)

-- a double quoted string value
local t_string = token("operator",double)
               * token("string",content)
               * token("operator",double)

-- a double quoted key followed by a colon
local t_key = token("operator",double)
            * token("text",content)
            * token("operator",double)
            * t_optionalws
            * token("operator",colon)

local t_fences = token("operator",fence) -- grouping

local t_reserved = token("primitive",reserved)

local t_rest = token("default",anything)

jsonlexer._rules = {
    { "whitespace", t_spacing },
    { "reserved", t_reserved },
    { "key", t_key },
    { "number", t_number },
    { "string", t_string },
    { "fences", t_fences },
    { "operator", t_operator },
    { "rest", t_rest },
}

jsonlexer._tokenstyles = context.styleset

jsonlexer._foldpattern = fence

jsonlexer._foldsymbols = {
    _patterns = {
        "{", "}",
        "[", "]",
    },
    ["grouping"] = {
        ["{"] = 1, ["}"] = -1,
        ["["] = 1, ["]"] = -1,
    },
}

return jsonlexer
diff --git a/context/data/textadept/context/lexers/scite-context-lexer-lua-longstring.lua b/context/data/textadept/context/lexers/scite-context-lexer-lua-longstring.lua
deleted file mode 100644
index b1304f65c..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer-lua-longstring.lua
+++ /dev/null
@@ -1,31 +0,0 @@
local info = {
    version = 1.002,
    comment = "scintilla lpeg lexer for lua longstrings",
    author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license = "see context related readme files",
}

-- A helper lexer for embedding: inside a lua long string everything that
-- is not spacing simply gets the string style.

local lexer = require("scite-context-lexer")
local context = lexer.context
local patterns = context.patterns

local token = lexer.token

local stringlexer = lexer.new("lua-longstring","scite-context-lexer-lua-longstring")
local whitespace = stringlexer.whitespace

local blank = patterns.space
local solid = 1 - blank

stringlexer._rules = {
    { "whitespace", token(whitespace, blank^1) },
    { "string", token("string", solid^1) },
}

stringlexer._tokenstyles = context.styleset

return stringlexer
diff --git a/context/data/textadept/context/lexers/scite-context-lexer-lua.lua b/context/data/textadept/context/lexers/scite-context-lexer-lua.lua
deleted file mode 100644
index 0e54d56ba..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer-lua.lua
+++ /dev/null
@@ -1,396 +0,0 @@
-local info = {
- version = 1.002,
- comment = "scintilla lpeg lexer for lua",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- beware: all multiline is messy, so even if it's no lexer, it should be an embedded lexer
--- we probably could use a local whitespace variant but this is cleaner
-
-local P, R, S, C, Cmt, Cp = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cmt, lpeg.Cp
-local match, find = string.match, string.find
-local setmetatable = setmetatable
-
-local lexer = require("scite-context-lexer")
-local context = lexer.context
-local patterns = context.patterns
-
-local token = lexer.token
-local exact_match = lexer.exact_match
-local just_match = lexer.just_match
-
-local lualexer = lexer.new("lua","scite-context-lexer-lua")
-local whitespace = lualexer.whitespace
-
-local stringlexer = lexer.load("scite-context-lexer-lua-longstring")
------ labellexer = lexer.load("scite-context-lexer-lua-labelstring")
-
-local directives = { } -- communication channel
-
--- this will be extended
-
--- we could combine some in a hash that returns the class that then makes the token
--- this can save time on large files
-
-local keywords = {
- "and", "break", "do", "else", "elseif", "end", "false", "for", "function", -- "goto",
- "if", "in", "local", "nil", "not", "or", "repeat", "return", "then", "true",
- "until", "while",
-}
-
-local functions = {
- "assert", "collectgarbage", "dofile", "error", "getmetatable",
- "ipairs", "load", "loadfile", "module", "next", "pairs",
- "pcall", "print", "rawequal", "rawget", "rawset", "require",
- "setmetatable", "tonumber", "tostring", "type", "unpack", "xpcall", "select",
-
- "string", "table", "coroutine", "debug", "file", "io", "lpeg", "math", "os", "package", "bit32", "utf8",
-}
-
-local constants = {
- "_G", "_VERSION", "_M", "...", "_ENV",
- -- here too
- "__add", "__call", "__concat", "__div", "__idiv", "__eq", "__gc", "__index",
- "__le", "__lt", "__metatable", "__mode", "__mul", "__newindex",
- "__pow", "__sub", "__tostring", "__unm", "__len",
- "__pairs", "__ipairs",
- "__close",
- "NaN",
- "<const>", "<toclose>",
-}
-
--- local tokenmappings = { }
---
--- for i=1,#keywords do tokenmappings[keywords [i]] = "keyword" }
--- for i=1,#functions do tokenmappings[functions[i]] = "function" }
--- for i=1,#constants do tokenmappings[constants[i]] = "constant" }
-
-local internals = { -- __
- "add", "call", "concat", "div", "idiv", "eq", "gc", "index",
- "le", "lt", "metatable", "mode", "mul", "newindex",
- "pow", "sub", "tostring", "unm", "len",
- "pairs", "ipairs",
- "close",
-}
-
-local depricated = {
- "arg", "arg.n",
- "loadstring", "setfenv", "getfenv",
- "pack",
-}
-
-local csnames = { -- todo: option
- "commands",
- "context",
- -- "ctxcmd",
- -- "ctx",
- "metafun",
- "metapost",
-}
-
-local level = nil
-local setlevel = function(_,i,s) level = s return i end
-
-local equals = P("=")^0
-
-local longonestart = P("[[")
-local longonestop = P("]]")
-local longonestring = (1-longonestop)^0
-
-local longtwostart = P("[") * Cmt(equals,setlevel) * P("[")
-local longtwostop = P("]") * equals * P("]")
-
-local sentinels = { } setmetatable(sentinels, { __index = function(t,k) local v = "]" .. k .. "]" t[k] = v return v end })
-
-local longtwostring = P(function(input,index)
- if level then
- -- local sentinel = "]" .. level .. "]"
- local sentinel = sentinels[level]
- local _, stop = find(input,sentinel,index,true)
- return stop and stop + 1 - #sentinel or #input + 1
- end
-end)
-
- local longtwostring_body = longtwostring
-
- local longtwostring_end = P(function(input,index)
- if level then
- -- local sentinel = "]" .. level .. "]"
- local sentinel = sentinels[level]
- local _, stop = find(input,sentinel,index,true)
- return stop and stop + 1 or #input + 1
- end
- end)
-
-local longcomment = Cmt(#("[[" + ("[" * C(equals) * "[")), function(input,index,level)
- -- local sentinel = "]" .. level .. "]"
- local sentinel = sentinels[level]
- local _, stop = find(input,sentinel,index,true)
- return stop and stop + 1 or #input + 1
-end)
-
-local space = patterns.space -- S(" \n\r\t\f\v")
-local any = patterns.any
-local eol = patterns.eol
-
-local squote = P("'")
-local dquote = P('"')
-local escaped = P("\\") * P(1)
-local dashes = P("--")
-
-local spacing = token(whitespace, space^1)
-local rest = token("default", any)
-
-local shortcomment = token("comment", dashes * (1-eol)^0)
-local longcomment = token("comment", dashes * longcomment)
-
--- fails on very long string with \ at end of lines (needs embedded lexer)
--- and also on newline before " but it makes no sense to waste time on it
-
-local shortstring = token("quote", dquote)
- * token("string", (escaped + (1-dquote))^0)
- * token("quote", dquote)
- + token("quote", squote)
- * token("string", (escaped + (1-squote))^0)
- * token("quote", squote)
-
------ longstring = token("quote", longonestart)
------ * token("string", longonestring)
------ * token("quote", longonestop)
------ + token("quote", longtwostart)
------ * token("string", longtwostring)
------ * token("quote", longtwostop)
-
-local string = shortstring
------ + longstring
-
-lexer.embed_lexer(lualexer, stringlexer, token("quote",longtwostart), token("string",longtwostring_body) * token("quote",longtwostring_end))
-
-local integer = P("-")^-1 * (patterns.hexadecimal + patterns.decimal)
-local number = token("number", patterns.float + integer)
- * (token("error",R("AZ","az","__")^1))^0
-
--- officially 127-255 are ok but not utf so useless
-
------ validword = R("AZ","az","__") * R("AZ","az","__","09")^0
-
-local utf8character = P(1) * R("\128\191")^1
-local validword = (R("AZ","az","__") + utf8character) * (R("AZ","az","__","09") + utf8character)^0
-local validsuffix = (R("AZ","az") + utf8character) * (R("AZ","az","__","09") + utf8character)^0
-
-local identifier = token("default",validword)
-
------ operator = token("special", P('..') + P('~=') + S('+-*/%^#=<>;:,.{}[]()')) -- maybe split off {}[]()
------ operator = token("special", S('+-*/%^#=<>;:,{}[]()') + P('..') + P('.') + P('~=') ) -- maybe split off {}[]()
------ operator = token("special", S('+-*/%^#=<>;:,{}[]().') + P('~=') ) -- no ^1 because of nested lexers
-local operator = token("special", S('+-*/%^#=<>;:,{}[]().|~')) -- no ^1 because of nested lexers
-
-local structure = token("special", S('{}[]()'))
-
-local optionalspace = spacing^0
-local hasargument = #S("{([")
-
--- ideal should be an embedded lexer ..
-
-local gotokeyword = token("keyword", P("goto"))
- * spacing
- * token("grouping",validword)
-local gotolabel = token("keyword", P("::"))
- * (spacing + shortcomment)^0
- * token("grouping",validword)
- * (spacing + shortcomment)^0
- * token("keyword", P("::"))
-
------ p_keywords = exact_match(keywords)
------ p_functions = exact_match(functions)
------ p_constants = exact_match(constants)
------ p_internals = P("__")
------ * exact_match(internals)
-
-local p_finish = #(1-R("az","AZ","__"))
-local p_keywords = lexer.helpers.utfchartabletopattern(keywords) * p_finish -- exact_match(keywords)
-local p_functions = lexer.helpers.utfchartabletopattern(functions) * p_finish -- exact_match(functions)
-local p_constants = lexer.helpers.utfchartabletopattern(constants) * p_finish -- exact_match(constants)
-local p_internals = P("__")
- * lexer.helpers.utfchartabletopattern(internals) * p_finish -- exact_match(internals)
-
-local p_csnames = lexer.helpers.utfchartabletopattern(csnames) -- * p_finish -- just_match(csnames)
-local p_ctnames = P("ctx") * R("AZ","az","__")^0
-local keyword = token("keyword", p_keywords)
-local builtin = token("plain", p_functions)
-local constant = token("data", p_constants)
-local internal = token("data", p_internals)
-local csname = token("user", p_csnames + p_ctnames)
- * p_finish * optionalspace * (
- hasargument
- + ( token("special", S(".:")) * optionalspace * token("user", validword) )^1
- )^-1
-
--- we could also check S(".:") * p_keyword etc, could be faster
-
-local identifier = token("default", validword)
- * ( optionalspace * token("special", S(".:")) * optionalspace * (
- token("warning", p_keywords) +
- token("data", p_internals) + -- needs checking
- token("default", validword )
- ) )^0
-
--- local t = { } for k, v in next, tokenmappings do t[#t+1] = k end t = table.concat(t)
--- -- local experimental = (S(t)^1) / function(s) return tokenmappings[s] end * Cp()
---
--- local experimental = Cmt(S(t)^1, function(_,i,s)
--- local t = tokenmappings[s]
--- if t then
--- return true, t, i
--- end
--- end)
-
-lualexer._rules = {
- { "whitespace", spacing },
- { "keyword", keyword }, -- can be combined
- -- { "structure", structure },
- { "function", builtin }, -- can be combined
- { "constant", constant }, -- can be combined
- -- { "experimental", experimental }, -- works but better split
- { "csname", csname },
- { "goto", gotokeyword },
- { "identifier", identifier },
- { "string", string },
- { "number", number },
- { "longcomment", longcomment },
- { "shortcomment", shortcomment },
- { "label", gotolabel },
- { "operator", operator },
- { "rest", rest },
-}
-
--- -- experiment
---
--- local idtoken = R("az","AZ","__")
---
--- function context.one_of_match(specification)
--- local pattern = idtoken -- the concat catches _ etc
--- local list = { }
--- for i=1,#specification do
--- local style = specification[i][1]
--- local words = specification[i][2]
--- pattern = pattern + S(table.concat(words))
--- for i=1,#words do
--- list[words[i]] = style
--- end
--- end
--- return Cmt(pattern^1, function(_,i,s)
--- local style = list[s]
--- if style then
--- return true, { style, i } -- and i or nil
--- else
--- -- fail
--- end
--- end)
--- end
---
--- local whatever = context.one_of_match {
--- { "keyword", keywords }, -- keyword
--- { "plain", functions }, -- builtin
--- { "data", constants }, -- constant
--- }
---
--- lualexer._rules = {
--- { "whitespace", spacing },
--- { "whatever", whatever },
--- { "csname", csname },
--- { "goto", gotokeyword },
--- { "identifier", identifier },
--- { "string", string },
--- { "number", number },
--- { "longcomment", longcomment },
--- { "shortcomment", shortcomment },
--- { "label", gotolabel },
--- { "operator", operator },
--- { "rest", rest },
--- }
-
-lualexer._tokenstyles = context.styleset
-
--- lualexer._foldpattern = R("az")^2 + S("{}[]") -- separate entry else interference
-
-lualexer._foldpattern = (P("end") + P("if") + P("do") + P("function") + P("repeat") + P("until")) * P(#(1 - R("az")))
- + S("{}[]")
-
-lualexer._foldsymbols = {
- _patterns = {
- "[a-z][a-z]+",
- "[{}%[%]]",
- },
- ["keyword"] = { -- challenge: if=0 then=1 else=-1 elseif=-1
- ["if"] = 1, -- if .. [then|else] .. end
- ["do"] = 1, -- [while] do .. end
- ["function"] = 1, -- function .. end
- ["repeat"] = 1, -- repeat .. until
- ["until"] = -1,
- ["end"] = -1,
- },
- ["comment"] = {
- ["["] = 1, ["]"] = -1,
- },
- -- ["quote"] = { -- confusing
- -- ["["] = 1, ["]"] = -1,
- -- },
- ["special"] = {
- -- ["("] = 1, [")"] = -1,
- ["{"] = 1, ["}"] = -1,
- },
-}
-
--- embedded in tex:
-
-local cstoken = R("az","AZ","\127\255") + S("@!?_")
-local texcsname = P("\\") * cstoken^1
-local commentline = P("%") * (1-S("\n\r"))^0
-
-local texcomment = token("comment", Cmt(commentline, function() return directives.cld_inline end))
-
-local longthreestart = P("\\!!bs")
-local longthreestop = P("\\!!es")
-local longthreestring = (1-longthreestop)^0
-
-local texstring = token("quote", longthreestart)
- * token("string", longthreestring)
- * token("quote", longthreestop)
-
------ texcommand = token("user", texcsname)
-local texcommand = token("warning", texcsname)
-
--- local texstring = token("quote", longthreestart)
--- * (texcommand + token("string",P(1-texcommand-longthreestop)^1) - longthreestop)^0 -- we match long non-\cs sequences
--- * token("quote", longthreestop)
-
--- local whitespace = "whitespace"
--- local spacing = token(whitespace, space^1)
-
-lualexer._directives = directives
-
-lualexer._rules_cld = {
- { "whitespace", spacing },
- { "texstring", texstring },
- { "texcomment", texcomment },
- { "texcommand", texcommand },
- -- { "structure", structure },
- { "keyword", keyword },
- { "function", builtin },
- { "csname", csname },
- { "goto", gotokeyword },
- { "constant", constant },
- { "identifier", identifier },
- { "string", string },
- { "longcomment", longcomment },
- { "shortcomment", shortcomment }, -- should not be used inline so best signal it as comment (otherwise complex state till end of inline)
- { "number", number },
- { "label", gotolabel },
- { "operator", operator },
- { "rest", rest },
-}
-
-return lualexer
diff --git a/context/data/textadept/context/lexers/scite-context-lexer-mps.lua b/context/data/textadept/context/lexers/scite-context-lexer-mps.lua
deleted file mode 100644
index 356bf1f6b..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer-mps.lua
+++ /dev/null
@@ -1,189 +0,0 @@
-local info = {
- version = 1.002,
- comment = "scintilla lpeg lexer for metafun",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-local global, string, table, lpeg = _G, string, table, lpeg
-local P, R, S, V = lpeg.P, lpeg.R, lpeg.S, lpeg.V
-local type = type
-
-local lexer = require("scite-context-lexer")
-local context = lexer.context
-local patterns = context.patterns
-
-local token = lexer.token
-local exact_match = lexer.exact_match
-
-local metafunlexer = lexer.new("mps","scite-context-lexer-mps")
-local whitespace = metafunlexer.whitespace
-
-local metapostprimitives = { }
-local metapostinternals = { }
-local metapostshortcuts = { }
-local metapostcommands = { }
-
-local metafuninternals = { }
-local metafunshortcuts = { }
-local metafuncommands = { }
-
-local mergedshortcuts = { }
-local mergedinternals = { }
-
-do
-
- local definitions = context.loaddefinitions("scite-context-data-metapost")
-
- if definitions then
- metapostprimitives = definitions.primitives or { }
- metapostinternals = definitions.internals or { }
- metapostshortcuts = definitions.shortcuts or { }
- metapostcommands = definitions.commands or { }
- end
-
- local definitions = context.loaddefinitions("scite-context-data-metafun")
-
- if definitions then
- metafuninternals = definitions.internals or { }
- metafunshortcuts = definitions.shortcuts or { }
- metafuncommands = definitions.commands or { }
- end
-
- for i=1,#metapostshortcuts do
- mergedshortcuts[#mergedshortcuts+1] = metapostshortcuts[i]
- end
- for i=1,#metafunshortcuts do
- mergedshortcuts[#mergedshortcuts+1] = metafunshortcuts[i]
- end
-
- for i=1,#metapostinternals do
- mergedinternals[#mergedinternals+1] = metapostinternals[i]
- end
- for i=1,#metafuninternals do
- mergedinternals[#mergedinternals+1] = metafuninternals[i]
- end
-
-end
-
-local space = patterns.space -- S(" \n\r\t\f\v")
-local any = patterns.any
-
-local dquote = P('"')
-local cstoken = patterns.idtoken
-local mptoken = patterns.alpha
-local leftbrace = P("{")
-local rightbrace = P("}")
-local number = patterns.real
-
-local cstokentex = R("az","AZ","\127\255") + S("@!?_")
-
--- we could collapse as in tex
-
-local spacing = token(whitespace, space^1)
-local rest = token("default", any)
-local comment = token("comment", P("%") * (1-S("\n\r"))^0)
-local internal = token("reserved", exact_match(mergedshortcuts,false))
-local shortcut = token("data", exact_match(mergedinternals))
-
-local helper = token("command", exact_match(metafuncommands))
-local plain = token("plain", exact_match(metapostcommands))
-local quoted = token("quote", dquote)
- * token("string", P(1-dquote)^0)
- * token("quote", dquote)
-local separator = P(" ") + S("\n\r")^1
-local btex = (P("btex") + P("verbatimtex")) * separator
-local etex = separator * P("etex")
-local texstuff = token("quote", btex)
- * token("string", (1-etex)^0)
- * token("quote", etex)
-local primitive = token("primitive", exact_match(metapostprimitives))
-local identifier = token("default", cstoken^1)
-local number = token("number", number)
-local grouping = token("grouping", S("()[]{}")) -- can be an option
-local suffix = token("number", P("#@") + P("@#") + P("#"))
-local special = token("special", P("#@") + P("@#") + S("#()[]{}<>=:\"")) -- or else := <> etc split
-local texlike = token("warning", P("\\") * cstokentex^1)
-local extra = token("extra", P("+-+") + P("++") + S("`~%^&_-+*/\'|\\"))
-
-local nested = P { leftbrace * (V(1) + (1-rightbrace))^0 * rightbrace }
-local texlike = token("embedded", P("\\") * (P("MP") + P("mp")) * mptoken^1)
- * spacing^0
- * token("grouping", leftbrace)
- * token("default", (nested + (1-rightbrace))^0 )
- * token("grouping", rightbrace)
- + token("warning", P("\\") * cstokentex^1)
-
--- lua: we assume: lua ( "lua code" )
-
-local cldlexer = lexer.load("scite-context-lexer-cld","mps-cld")
-
-local startlua = P("lua") * space^0 * P('(') * space^0 * P('"')
-local stoplua = P('"') * space^0 * P(')')
-
-local startluacode = token("embedded", startlua)
-local stopluacode = #stoplua * token("embedded", stoplua)
-
-lexer.embed_lexer(metafunlexer, cldlexer, startluacode, stopluacode)
-
-local luacall = token("embedded",P("lua") * ( P(".") * R("az","AZ","__")^1 )^1)
-
-local keyword = token("default", (R("AZ","az","__")^1) * # P(space^0 * P("=")))
-
-metafunlexer._rules = {
- { "whitespace", spacing },
- { "comment", comment },
- { "keyword", keyword }, -- experiment, maybe to simple
- { "internal", internal },
- { "shortcut", shortcut },
- { "luacall", luacall },
- { "helper", helper },
- { "plain", plain },
- { "primitive", primitive },
- { "texstuff", texstuff },
- { "suffix", suffix },
- { "identifier", identifier },
- { "number", number },
- { "quoted", quoted },
- -- { "grouping", grouping }, -- can be an option
- { "special", special },
- { "texlike", texlike },
- { "extra", extra },
- { "rest", rest },
-}
-
-metafunlexer._tokenstyles = context.styleset
-
-metafunlexer._foldpattern = patterns.lower^2 -- separate entry else interference
-
-metafunlexer._foldsymbols = {
- _patterns = {
- "[a-z][a-z]+",
- },
- ["plain"] = {
- ["beginfig"] = 1,
- ["endfig"] = -1,
- ["beginglyph"] = 1,
- ["endglyph"] = -1,
- -- ["begingraph"] = 1,
- -- ["endgraph"] = -1,
- },
- ["primitive"] = {
- ["def"] = 1,
- ["vardef"] = 1,
- ["primarydef"] = 1,
- ["secondarydef" ] = 1,
- ["tertiarydef"] = 1,
- ["enddef"] = -1,
- ["if"] = 1,
- ["fi"] = -1,
- ["for"] = 1,
- ["forever"] = 1,
- ["endfor"] = -1,
- }
-}
-
--- if inspect then inspect(metafunlexer) end
-
-return metafunlexer
diff --git a/context/data/textadept/context/lexers/scite-context-lexer-pdf-object.lua b/context/data/textadept/context/lexers/scite-context-lexer-pdf-object.lua
deleted file mode 100644
index 155a9bd51..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer-pdf-object.lua
+++ /dev/null
@@ -1,136 +0,0 @@
-local info = {
- version = 1.002,
- comment = "scintilla lpeg lexer for pdf objects",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- no longer used: nesting lexers with whitespace in start/stop is unreliable
-
-local P, R, S, C, V = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.V
-
-local lexer = require("scite-context-lexer")
-local context = lexer.context
-local patterns = context.patterns
-
-local token = lexer.token
-
-local pdfobjectlexer = lexer.new("pdfobj","scite-context-lexer-pdf-object")
-local whitespace = pdfobjectlexer.whitespace
-
-local space = patterns.space
-local spacing = patterns.spacing
-local nospacing = patterns.nospacing
-local anything = patterns.anything
-local newline = patterns.eol
-local real = patterns.real
-local cardinal = patterns.cardinal
-
-local lparent = P("(")
-local rparent = P(")")
-local langle = P("<")
-local rangle = P(">")
-local escape = P("\\")
-local unicodetrigger = P("feff")
-
-local nametoken = 1 - space - S("<>/[]()")
-local name = P("/") * nametoken^1
-
-local p_string = P { ( escape * anything + lparent * V(1) * rparent + (1 - rparent) )^0 }
-
-local t_spacing = token(whitespace, spacing)
-local t_spaces = token(whitespace, spacing)^0
-local t_rest = token("default", nospacing) -- anything
-
-local p_stream = P("stream")
-local p_endstream = P("endstream")
-local p_obj = P("obj")
-local p_endobj = P("endobj")
-local p_reference = P("R")
-
-local p_objectnumber = patterns.cardinal
-local p_comment = P("%") * (1-S("\n\r"))^0
-
-local t_string = token("quote", lparent)
- * token("string", p_string)
- * token("quote", rparent)
-local t_unicode = token("quote", langle)
- * token("plain", unicodetrigger)
- * token("string", (1-rangle)^1)
- * token("quote", rangle)
-local t_whatsit = token("quote", langle)
- * token("string", (1-rangle)^1)
- * token("quote", rangle)
-local t_keyword = token("command", name)
-local t_constant = token("constant", name)
-local t_number = token("number", real)
--- t_reference = token("number", cardinal)
--- * t_spacing
--- * token("number", cardinal)
-local t_reserved = token("number", P("true") + P("false") + P("NULL"))
-local t_reference = token("warning", cardinal)
- * t_spacing
- * token("warning", cardinal)
- * t_spacing
- * token("keyword", p_reference)
-
-local t_comment = token("comment", p_comment)
-
-local t_openobject = token("warning", p_objectnumber * spacing)
--- * t_spacing
- * token("warning", p_objectnumber * spacing)
--- * t_spacing
- * token("keyword", p_obj)
-local t_closeobject = token("keyword", p_endobj)
-
-local t_opendictionary = token("grouping", P("<<"))
-local t_closedictionary = token("grouping", P(">>"))
-
-local t_openarray = token("grouping", P("["))
-local t_closearray = token("grouping", P("]"))
-
--- todo: comment
-
-local t_stream = token("keyword", p_stream)
--- * token("default", newline * (1-newline*p_endstream*newline)^1 * newline)
--- * token("text", (1 - p_endstream)^1)
- * (token("text", (1 - p_endstream-spacing)^1) + t_spacing)^1
- * token("keyword", p_endstream)
-
-local t_dictionary = { "dictionary",
- dictionary = t_opendictionary * (t_spaces * t_keyword * t_spaces * V("whatever"))^0 * t_spaces * t_closedictionary,
- array = t_openarray * (t_spaces * V("whatever"))^0 * t_spaces * t_closearray,
- whatever = V("dictionary") + V("array") + t_constant + t_reference + t_string + t_unicode + t_number + t_reserved + t_whatsit,
- }
-
------ t_object = { "object", -- weird that we need to catch the end here (probably otherwise an invalid lpeg)
------ object = t_spaces * (V("dictionary") * t_spaces * t_stream^-1 + V("array") + V("number") + t_spaces) * t_spaces * t_closeobject,
------ dictionary = t_opendictionary * (t_spaces * t_keyword * t_spaces * V("whatever"))^0 * t_spaces * t_closedictionary,
------ array = t_openarray * (t_spaces * V("whatever"))^0 * t_spaces * t_closearray,
------ whatever = V("dictionary") + V("array") + t_constant + t_reference + t_string + t_unicode + t_number + t_reserved + t_whatsit,
------ number = t_number,
------ }
-
-local t_object = { "object", -- weird that we need to catch the end here (probably otherwise an invalid lpeg)
- dictionary = t_dictionary.dictionary,
- array = t_dictionary.array,
- whatever = t_dictionary.whatever,
- object = t_openobject^-1 * t_spaces * (V("dictionary") * t_spaces * t_stream^-1 + V("array") + V("number") + t_spaces) * t_spaces * t_closeobject,
- number = t_number,
- }
-
-pdfobjectlexer._shared = {
- dictionary = t_dictionary,
- object = t_object,
- stream = t_stream,
-}
-
-pdfobjectlexer._rules = {
- { "whitespace", t_spacing }, -- in fact, here we don't want whitespace as it's top level lexer work
- { "object", t_object },
-}
-
-pdfobjectlexer._tokenstyles = context.styleset
-
-return pdfobjectlexer
diff --git a/context/data/textadept/context/lexers/scite-context-lexer-pdf-xref.lua b/context/data/textadept/context/lexers/scite-context-lexer-pdf-xref.lua
deleted file mode 100644
index 14ba5296b..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer-pdf-xref.lua
+++ /dev/null
@@ -1,43 +0,0 @@
-local info = {
- version = 1.002,
- comment = "scintilla lpeg lexer for pdf xref",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- no longer used: nesting lexers with whitespace in start/stop is unreliable
-
-local P, R = lpeg.P, lpeg.R
-
-local lexer = require("scite-context-lexer")
-local context = lexer.context
-local patterns = context.patterns
-
-local token = lexer.token
-
-local pdfxreflexer = lexer.new("pdfxref","scite-context-lexer-pdf-xref")
-local whitespace = pdfxreflexer.whitespace
-
-local spacing = patterns.spacing
-local cardinal = patterns.cardinal
-local alpha = patterns.alpha
-
-local t_spacing = token(whitespace, spacing)
-
-local p_xref = P("xref")
-local t_xref = token("keyword",p_xref)
- * token("number", cardinal * spacing * cardinal * spacing)
-
-local t_number = token("number", cardinal * spacing * cardinal * spacing)
- * token("keyword", alpha)
-
-pdfxreflexer._rules = {
- { "whitespace", t_spacing },
- { "xref", t_xref },
- { "number", t_number },
-}
-
-pdfxreflexer._tokenstyles = context.styleset
-
-return pdfxreflexer
diff --git a/context/data/textadept/context/lexers/scite-context-lexer-pdf.lua b/context/data/textadept/context/lexers/scite-context-lexer-pdf.lua
deleted file mode 100644
index 1956071b7..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer-pdf.lua
+++ /dev/null
@@ -1,218 +0,0 @@
-local info = {
- version = 1.002,
- comment = "scintilla lpeg lexer for pdf",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- pdf is normally static .. i.e. not edited so we don't really
--- need embedded lexers.
-
-local P, R, S, V = lpeg.P, lpeg.R, lpeg.S, lpeg.V
-
-local lexer = require("scite-context-lexer")
-local context = lexer.context
-local patterns = context.patterns
-
-local token = lexer.token
-
-local pdflexer = lexer.new("pdf","scite-context-lexer-pdf")
-local whitespace = pdflexer.whitespace
-
------ pdfobjectlexer = lexer.load("scite-context-lexer-pdf-object")
------ pdfxreflexer = lexer.load("scite-context-lexer-pdf-xref")
-
-local anything = patterns.anything
-local space = patterns.space
-local spacing = patterns.spacing
-local nospacing = patterns.nospacing
-local anything = patterns.anything
-local restofline = patterns.restofline
-
-local t_whitespace = token(whitespace, spacing)
-local t_spacing = token("default", spacing)
------ t_rest = token("default", nospacing)
-local t_rest = token("default", anything)
-
-local p_comment = P("%") * restofline
-local t_comment = token("comment", p_comment)
-
--- whatever
-
-local space = patterns.space
-local spacing = patterns.spacing
-local nospacing = patterns.nospacing
-local anything = patterns.anything
-local newline = patterns.eol
-local real = patterns.real
-local cardinal = patterns.cardinal
-local alpha = patterns.alpha
-
-local lparent = P("(")
-local rparent = P(")")
-local langle = P("<")
-local rangle = P(">")
-local escape = P("\\")
-local unicodetrigger = P("feff")
-
-local nametoken = 1 - space - S("<>/[]()")
-local name = P("/") * nametoken^1
-
-local p_string = P { ( escape * anything + lparent * V(1) * rparent + (1 - rparent) )^0 }
-
-local t_spacing = token("default", spacing)
-local t_spaces = token("default", spacing)^0
-local t_rest = token("default", nospacing) -- anything
-
-local p_stream = P("stream")
-local p_endstream = P("endstream")
-local p_obj = P("obj")
-local p_endobj = P("endobj")
-local p_reference = P("R")
-
-local p_objectnumber = patterns.cardinal
-local p_comment = P("%") * (1-S("\n\r"))^0
-
-local t_string = token("quote", lparent)
- * token("string", p_string)
- * token("quote", rparent)
-local t_unicode = token("quote", langle)
- * token("plain", unicodetrigger)
- * token("string", (1-rangle)^1)
- * token("quote", rangle)
-local t_whatsit = token("quote", langle)
- * token("string", (1-rangle)^1)
- * token("quote", rangle)
-local t_keyword = token("command", name)
-local t_constant = token("constant", name)
-local t_number = token("number", real)
--- t_reference = token("number", cardinal)
--- * t_spacing
--- * token("number", cardinal)
-local t_reserved = token("number", P("true") + P("false") + P("null"))
--- t_reference = token("warning", cardinal * spacing * cardinal * spacing)
--- * token("keyword", p_reference)
-local t_reference = token("warning", cardinal)
- * t_spacing
- * token("warning", cardinal)
- * t_spacing
- * token("keyword", p_reference)
-
-local t_comment = token("comment", p_comment)
-
-local t_openobject = token("warning", p_objectnumber)
- * t_spacing
- * token("warning", p_objectnumber)
- * t_spacing
- * token("keyword", p_obj)
--- t_openobject = token("warning", p_objectnumber * spacing)
--- * token("warning", p_objectnumber * spacing)
--- * token("keyword", p_obj)
-local t_closeobject = token("keyword", p_endobj)
-
-local t_opendictionary = token("grouping", P("<<"))
-local t_closedictionary = token("grouping", P(">>"))
-
-local t_openarray = token("grouping", P("["))
-local t_closearray = token("grouping", P("]"))
-
-local t_stream = token("keyword", p_stream)
- * token("text", (1 - p_endstream)^1)
- * token("keyword", p_endstream)
-
-local t_other = t_constant + t_reference + t_string + t_unicode + t_number + t_reserved + t_whatsit
-
-local t_dictionary = { "dictionary",
- dictionary = t_opendictionary
- * (t_spaces * t_keyword * t_spaces * V("whatever"))^0
- * t_spaces
- * t_closedictionary,
- array = t_openarray
- * (t_spaces * V("whatever"))^0
- * t_spaces
- * t_closearray,
- whatever = V("dictionary")
- + V("array")
- + t_other,
- }
-
-local t_object = { "object", -- weird that we need to catch the end here (probably otherwise an invalid lpeg)
- dictionary = t_dictionary.dictionary,
- array = t_dictionary.array,
- whatever = t_dictionary.whatever,
- object = t_openobject
- * t_spaces
- * (V("dictionary") * t_spaces * t_stream^-1 + V("array") + t_other)
- * t_spaces
- * t_closeobject,
- number = t_number,
- }
-
--- objects ... sometimes NUL characters play havoc ... and in xref we have
--- issues with embedded lexers that have spaces in the start and stop
--- conditions and this cannot be handled well either ... so, an imperfect
--- solution ... but anyway, there is not that much that can end up in
--- the root of the tree see we're sort of safe
-
-local p_trailer = P("trailer")
-local t_trailer = token("keyword", p_trailer)
- * t_spacing
- * t_dictionary
--- t_trailer = token("keyword", p_trailer * spacing)
--- * t_dictionary
-
-local p_startxref = P("startxref")
-local t_startxref = token("keyword", p_startxref)
- * t_spacing
- * token("number", cardinal)
--- t_startxref = token("keyword", p_startxref * spacing)
--- * token("number", cardinal)
-
-local p_xref = P("xref")
-local t_xref = token("keyword",p_xref)
- * t_spacing
- * token("number", cardinal)
- * t_spacing
- * token("number", cardinal)
- * spacing
--- t_xref = token("keyword",p_xref)
--- * token("number", spacing * cardinal * spacing * cardinal * spacing)
-
-local t_number = token("number", cardinal)
- * t_spacing
- * token("number", cardinal)
- * t_spacing
- * token("keyword", S("fn"))
--- t_number = token("number", cardinal * spacing * cardinal * spacing)
--- * token("keyword", S("fn"))
-
-pdflexer._rules = {
- { "whitespace", t_whitespace },
- { "object", t_object },
- { "comment", t_comment },
- { "trailer", t_trailer },
- { "startxref", t_startxref },
- { "xref", t_xref },
- { "number", t_number },
- { "rest", t_rest },
-}
-
-pdflexer._tokenstyles = context.styleset
-
--- lexer.inspect(pdflexer)
-
--- collapser: obj endobj stream endstream
-
-pdflexer._foldpattern = p_obj + p_endobj + p_stream + p_endstream
-
-pdflexer._foldsymbols = {
- ["keyword"] = {
- ["obj"] = 1,
- ["endobj"] = -1,
- ["stream"] = 1,
- ["endstream"] = -1,
- },
-}
-
-return pdflexer
diff --git a/context/data/textadept/context/lexers/scite-context-lexer-sas.lua b/context/data/textadept/context/lexers/scite-context-lexer-sas.lua
deleted file mode 100644
index e36569911..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer-sas.lua
+++ /dev/null
@@ -1,102 +0,0 @@
-local info = {
- version = 1.001,
- comment = "scintilla lpeg lexer for sas",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- todo: make this ok for the sas syntax as now it's sql
-
-local P, R, S = lpeg.P, lpeg.R, lpeg.S
-
-local lexer = require("scite-context-lexer")
-local context = lexer.context
-local patterns = context.patterns
-
-local token = lexer.token
-local exact_match = lexer.exact_match
-
-local saslexer = lexer.new("sas","scite-context-lexer-sAs")
-local whitespace = saslexer.whitespace
-
-local keywords_standard = {
- "anova" , "data", "run", "proc",
-}
-
-local keywords_dialects = {
- "class" , "do", "end" , "int" , "for" , "model" , "rannor" , "to" , "output"
-}
-
-local space = patterns.space -- S(" \n\r\t\f\v")
-local any = patterns.any
-local restofline = patterns.restofline
-local startofline = patterns.startofline
-
-local squote = P("'")
-local dquote = P('"')
-local bquote = P('`')
-local escaped = P("\\") * P(1)
-
-local begincomment = P("/*")
-local endcomment = P("*/")
-
-local decimal = patterns.decimal
-local float = patterns.float
-local integer = P("-")^-1 * decimal
-
-local spacing = token(whitespace, space^1)
-local rest = token("default", any)
-
-local shortcomment = token("comment", (P("#") + P("--")) * restofline^0)
-local longcomment = token("comment", begincomment * (1-endcomment)^0 * endcomment^-1)
-
-local identifier = token("default",lexer.helpers.utfidentifier)
-
-local shortstring = token("quote", dquote) -- can be shared
- * token("string", (escaped + (1-dquote))^0)
- * token("quote", dquote)
- + token("quote", squote)
- * token("string", (escaped + (1-squote))^0)
- * token("quote", squote)
- + token("quote", bquote)
- * token("string", (escaped + (1-bquote))^0)
- * token("quote", bquote)
-
-local p_keywords_s = exact_match(keywords_standard,nil,true)
-local p_keywords_d = exact_match(keywords_dialects,nil,true)
-local keyword_s = token("keyword", p_keywords_s)
-local keyword_d = token("command", p_keywords_d)
-
-local number = token("number", float + integer)
-local operator = token("special", S("+-*/%^!=<>;:{}[]().&|?~"))
-
-saslexer._tokenstyles = context.styleset
-
-saslexer._foldpattern = P("/*") + P("*/") + S("{}") -- separate entry else interference
-
-saslexer._foldsymbols = {
- _patterns = {
- "/%*",
- "%*/",
- },
- ["comment"] = {
- ["/*"] = 1,
- ["*/"] = -1,
- }
-}
-
-saslexer._rules = {
- { "whitespace", spacing },
- { "keyword-s", keyword_s },
- { "keyword-d", keyword_d },
- { "identifier", identifier },
- { "string", shortstring },
- { "longcomment", longcomment },
- { "shortcomment", shortcomment },
- { "number", number },
- { "operator", operator },
- { "rest", rest },
-}
-
-return saslexer
diff --git a/context/data/textadept/context/lexers/scite-context-lexer-sql.lua b/context/data/textadept/context/lexers/scite-context-lexer-sql.lua
deleted file mode 100644
index cf0a03331..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer-sql.lua
+++ /dev/null
@@ -1,238 +0,0 @@
-local info = {
- version = 1.001,
- comment = "scintilla lpeg lexer for sql",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-local P, R, S = lpeg.P, lpeg.R, lpeg.S
-
-local lexer = require("scite-context-lexer")
-local context = lexer.context
-local patterns = context.patterns
-
-local token = lexer.token
-local exact_match = lexer.exact_match
-
-local sqllexer = lexer.new("sql","scite-context-lexer-sql")
-local whitespace = sqllexer.whitespace
-
--- ANSI SQL 92 | 99 | 2003
-
-local keywords_standard = {
- "absolute", "action", "add", "after", "all", "allocate", "alter", "and", "any",
- "are", "array", "as", "asc", "asensitive", "assertion", "asymmetric", "at",
- "atomic", "authorization", "avg", "before", "begin", "between", "bigint",
- "binary", "bit", "bit_length", "blob", "boolean", "both", "breadth", "by",
- "call", "called", "cascade", "cascaded", "case", "cast", "catalog", "char",
- "char_length", "character", "character_length", "check", "clob", "close",
- "coalesce", "collate", "collation", "column", "commit", "condition", "connect",
- "connection", "constraint", "constraints", "constructor", "contains", "continue",
- "convert", "corresponding", "count", "create", "cross", "cube", "current",
- "current_date", "current_default_transform_group", "current_path",
- "current_role", "current_time", "current_timestamp",
- "current_transform_group_for_type", "current_user", "cursor", "cycle", "data",
- "date", "day", "deallocate", "dec", "decimal", "declare", "default",
- "deferrable", "deferred", "delete", "depth", "deref", "desc", "describe",
- "descriptor", "deterministic", "diagnostics", "disconnect", "distinct", "do",
- "domain", "double", "drop", "dynamic", "each", "element", "else", "elseif",
- "end", "equals", "escape", "except", "exception", "exec", "execute", "exists",
- "exit", "external", "extract", "false", "fetch", "filter", "first", "float",
- "for", "foreign", "found", "free", "from", "full", "function", "general", "get",
- "global", "go", "goto", "grant", "group", "grouping", "handler", "having",
- "hold", "hour", "identity", "if", "immediate", "in", "indicator", "initially",
- "inner", "inout", "input", "insensitive", "insert", "int", "integer",
- "intersect", "interval", "into", "is", "isolation", "iterate", "join", "key",
- "language", "large", "last", "lateral", "leading", "leave", "left", "level",
- "like", "local", "localtime", "localtimestamp", "locator", "loop", "lower",
- "map", "match", "max", "member", "merge", "method", "min", "minute", "modifies",
- "module", "month", "multiset", "names", "national", "natural", "nchar", "nclob",
- "new", "next", "no", "none", "not", "null", "nullif", "numeric", "object",
- "octet_length", "of", "old", "on", "only", "open", "option", "or", "order",
- "ordinality", "out", "outer", "output", "over", "overlaps", "pad", "parameter",
- "partial", "partition", "path", "position", "precision", "prepare", "preserve",
- "primary", "prior", "privileges", "procedure", "public", "range", "read",
- "reads", "real", "recursive", "ref", "references", "referencing", "relative",
- "release", "repeat", "resignal", "restrict", "result", "return", "returns",
- "revoke", "right", "role", "rollback", "rollup", "routine", "row", "rows",
- "savepoint", "schema", "scope", "scroll", "search", "second", "section",
- "select", "sensitive", "session", "session_user", "set", "sets", "signal",
- "similar", "size", "smallint", "some", "space", "specific", "specifictype",
- "sql", "sqlcode", "sqlerror", "sqlexception", "sqlstate", "sqlwarning", "start",
- "state", "static", "submultiset", "substring", "sum", "symmetric", "system",
- "system_user", "table", "tablesample", "temporary", "then", "time", "timestamp",
- "timezone_hour", "timezone_minute", "to", "trailing", "transaction", "translate",
- "translation", "treat", "trigger", "trim", "true", "under", "undo", "union",
- "unique", "unknown", "unnest", "until", "update", "upper", "usage", "user",
- "using", "value", "values", "varchar", "varying", "view", "when", "whenever",
- "where", "while", "window", "with", "within", "without", "work", "write", "year",
- "zone",
-}
-
--- The dialects list is taken from drupal.org with standard subtracted.
---
--- MySQL 3.23.x | 4.x | 5.x
--- PostGreSQL 8.1
--- MS SQL Server 2000
--- MS ODBC
--- Oracle 10.2
-
-local keywords_dialects = {
- "a", "abort", "abs", "access", "ada", "admin", "aggregate", "alias", "also",
- "always", "analyse", "analyze", "assignment", "attribute", "attributes", "audit",
- "auto_increment", "avg_row_length", "backup", "backward", "bernoulli", "bitvar",
- "bool", "break", "browse", "bulk", "c", "cache", "cardinality", "catalog_name",
- "ceil", "ceiling", "chain", "change", "character_set_catalog",
- "character_set_name", "character_set_schema", "characteristics", "characters",
- "checked", "checkpoint", "checksum", "class", "class_origin", "cluster",
- "clustered", "cobol", "collation_catalog", "collation_name", "collation_schema",
- "collect", "column_name", "columns", "command_function", "command_function_code",
- "comment", "committed", "completion", "compress", "compute", "condition_number",
- "connection_name", "constraint_catalog", "constraint_name", "constraint_schema",
- "containstable", "conversion", "copy", "corr", "covar_pop", "covar_samp",
- "createdb", "createrole", "createuser", "csv", "cume_dist", "cursor_name",
- "database", "databases", "datetime", "datetime_interval_code",
- "datetime_interval_precision", "day_hour", "day_microsecond", "day_minute",
- "day_second", "dayofmonth", "dayofweek", "dayofyear", "dbcc", "defaults",
- "defined", "definer", "degree", "delay_key_write", "delayed", "delimiter",
- "delimiters", "dense_rank", "deny", "derived", "destroy", "destructor",
- "dictionary", "disable", "disk", "dispatch", "distinctrow", "distributed", "div",
- "dual", "dummy", "dump", "dynamic_function", "dynamic_function_code", "enable",
- "enclosed", "encoding", "encrypted", "end-exec", "enum", "errlvl", "escaped",
- "every", "exclude", "excluding", "exclusive", "existing", "exp", "explain",
- "fields", "file", "fillfactor", "final", "float4", "float8", "floor", "flush",
- "following", "force", "fortran", "forward", "freetext", "freetexttable",
- "freeze", "fulltext", "fusion", "g", "generated", "granted", "grants",
- "greatest", "header", "heap", "hierarchy", "high_priority", "holdlock", "host",
- "hosts", "hour_microsecond", "hour_minute", "hour_second", "identified",
- "identity_insert", "identitycol", "ignore", "ilike", "immutable",
- "implementation", "implicit", "include", "including", "increment", "index",
- "infile", "infix", "inherit", "inherits", "initial", "initialize", "insert_id",
- "instance", "instantiable", "instead", "int1", "int2", "int3", "int4", "int8",
- "intersection", "invoker", "isam", "isnull", "k", "key_member", "key_type",
- "keys", "kill", "lancompiler", "last_insert_id", "least", "length", "less",
- "limit", "lineno", "lines", "listen", "ln", "load", "location", "lock", "login",
- "logs", "long", "longblob", "longtext", "low_priority", "m", "matched",
- "max_rows", "maxextents", "maxvalue", "mediumblob", "mediumint", "mediumtext",
- "message_length", "message_octet_length", "message_text", "middleint",
- "min_rows", "minus", "minute_microsecond", "minute_second", "minvalue",
- "mlslabel", "mod", "mode", "modify", "monthname", "more", "move", "mumps",
- "myisam", "name", "nesting", "no_write_to_binlog", "noaudit", "nocheck",
- "nocompress", "nocreatedb", "nocreaterole", "nocreateuser", "noinherit",
- "nologin", "nonclustered", "normalize", "normalized", "nosuperuser", "nothing",
- "notify", "notnull", "nowait", "nullable", "nulls", "number", "octets", "off",
- "offline", "offset", "offsets", "oids", "online", "opendatasource", "openquery",
- "openrowset", "openxml", "operation", "operator", "optimize", "optionally",
- "options", "ordering", "others", "outfile", "overlay", "overriding", "owner",
- "pack_keys", "parameter_mode", "parameter_name", "parameter_ordinal_position",
- "parameter_specific_catalog", "parameter_specific_name",
- "parameter_specific_schema", "parameters", "pascal", "password", "pctfree",
- "percent", "percent_rank", "percentile_cont", "percentile_disc", "placing",
- "plan", "pli", "postfix", "power", "preceding", "prefix", "preorder", "prepared",
- "print", "proc", "procedural", "process", "processlist", "purge", "quote",
- "raid0", "raiserror", "rank", "raw", "readtext", "recheck", "reconfigure",
- "regexp", "regr_avgx", "regr_avgy", "regr_count", "regr_intercept", "regr_r2",
- "regr_slope", "regr_sxx", "regr_sxy", "regr_syy", "reindex", "reload", "rename",
- "repeatable", "replace", "replication", "require", "reset", "resource",
- "restart", "restore", "returned_cardinality", "returned_length",
- "returned_octet_length", "returned_sqlstate", "rlike", "routine_catalog",
- "routine_name", "routine_schema", "row_count", "row_number", "rowcount",
- "rowguidcol", "rowid", "rownum", "rule", "save", "scale", "schema_name",
- "schemas", "scope_catalog", "scope_name", "scope_schema", "second_microsecond",
- "security", "self", "separator", "sequence", "serializable", "server_name",
- "setof", "setuser", "share", "show", "shutdown", "simple", "soname", "source",
- "spatial", "specific_name", "sql_big_result", "sql_big_selects",
- "sql_big_tables", "sql_calc_found_rows", "sql_log_off", "sql_log_update",
- "sql_low_priority_updates", "sql_select_limit", "sql_small_result",
- "sql_warnings", "sqlca", "sqrt", "ssl", "stable", "starting", "statement",
- "statistics", "status", "stddev_pop", "stddev_samp", "stdin", "stdout",
- "storage", "straight_join", "strict", "string", "structure", "style",
- "subclass_origin", "sublist", "successful", "superuser", "synonym", "sysdate",
- "sysid", "table_name", "tables", "tablespace", "temp", "template", "terminate",
- "terminated", "text", "textsize", "than", "ties", "tinyblob", "tinyint",
- "tinytext", "toast", "top", "top_level_count", "tran", "transaction_active",
- "transactions_committed", "transactions_rolled_back", "transform", "transforms",
- "trigger_catalog", "trigger_name", "trigger_schema", "truncate", "trusted",
- "tsequal", "type", "uescape", "uid", "unbounded", "uncommitted", "unencrypted",
- "unlisten", "unlock", "unnamed", "unsigned", "updatetext", "use",
- "user_defined_type_catalog", "user_defined_type_code", "user_defined_type_name",
- "user_defined_type_schema", "utc_date", "utc_time", "utc_timestamp", "vacuum",
- "valid", "validate", "validator", "var_pop", "var_samp", "varbinary", "varchar2",
- "varcharacter", "variable", "variables", "verbose", "volatile", "waitfor",
- "width_bucket", "writetext", "x509", "xor", "year_month", "zerofill",
-}
-
-local space = patterns.space -- S(" \n\r\t\f\v")
-local any = patterns.any
-local restofline = patterns.restofline
-local startofline = patterns.startofline
-
-local squote = P("'")
-local dquote = P('"')
-local bquote = P('`')
-local escaped = P("\\") * P(1)
-
-local begincomment = P("/*")
-local endcomment = P("*/")
-
-local decimal = patterns.decimal
-local float = patterns.float
-local integer = P("-")^-1 * decimal
-
-local spacing = token(whitespace, space^1)
-local rest = token("default", any)
-
-local shortcomment = token("comment", (P("#") + P("--")) * restofline^0)
-local longcomment = token("comment", begincomment * (1-endcomment)^0 * endcomment^-1)
-
-local p_validword = R("AZ","az","__") * R("AZ","az","__","09")^0
-local identifier = token("default",p_validword)
-
-local shortstring = token("quote", dquote) -- can be shared
- * token("string", (escaped + (1-dquote))^0)
- * token("quote", dquote)
- + token("quote", squote)
- * token("string", (escaped + (1-squote))^0)
- * token("quote", squote)
- + token("quote", bquote)
- * token("string", (escaped + (1-bquote))^0)
- * token("quote", bquote)
-
-local p_keywords_s = exact_match(keywords_standard,nil,true)
-local p_keywords_d = exact_match(keywords_dialects,nil,true)
-local keyword_s = token("keyword", p_keywords_s)
-local keyword_d = token("command", p_keywords_d)
-
-local number = token("number", float + integer)
-local operator = token("special", S("+-*/%^!=<>;:{}[]().&|?~"))
-
-sqllexer._tokenstyles = context.styleset
-
-sqllexer._foldpattern = P("/*") + P("*/") + S("{}") -- separate entry else interference
-
-sqllexer._foldsymbols = {
- _patterns = {
- "/%*",
- "%*/",
- },
- ["comment"] = {
- ["/*"] = 1,
- ["*/"] = -1,
- }
-}
-
-sqllexer._rules = {
- { "whitespace", spacing },
- { "keyword-s", keyword_s },
- { "keyword-d", keyword_d },
- { "identifier", identifier },
- { "string", shortstring },
- { "longcomment", longcomment },
- { "shortcomment", shortcomment },
- { "number", number },
- { "operator", operator },
- { "rest", rest },
-}
-
-return sqllexer
diff --git a/context/data/textadept/context/lexers/scite-context-lexer-tex-web.lua b/context/data/textadept/context/lexers/scite-context-lexer-tex-web.lua
deleted file mode 100644
index 88499a9c2..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer-tex-web.lua
+++ /dev/null
@@ -1,23 +0,0 @@
-local info = {
- version = 1.002,
- comment = "scintilla lpeg lexer for tex web",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-local lexer = require("scite-context-lexer")
-local context = lexer.context
-local patterns = context.patterns
-
-local texweblexer = lexer.new("tex-web","scite-context-lexer-tex")
-local texlexer = lexer.load("scite-context-lexer-tex")
-
--- can probably be done nicer now, a bit of a hack
-
-texweblexer._rules = texlexer._rules_web
-texweblexer._tokenstyles = texlexer._tokenstyles
-texweblexer._foldsymbols = texlexer._foldsymbols
-texweblexer._directives = texlexer._directives
-
-return texweblexer
diff --git a/context/data/textadept/context/lexers/scite-context-lexer-tex.lua b/context/data/textadept/context/lexers/scite-context-lexer-tex.lua
deleted file mode 100644
index 71cfce0f5..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer-tex.lua
+++ /dev/null
@@ -1,588 +0,0 @@
-local info = {
- version = 1.002,
- comment = "scintilla lpeg lexer for context",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- maybe: _LINEBYLINE variant for large files (no nesting)
--- maybe: protected_macros
-
---[[
-
- experiment dd 2009/10/28 .. todo:
-
- -- figure out if tabs instead of splits are possible
- -- locate an option to enter name in file dialogue (like windows permits)
- -- figure out why loading a file fails
- -- we cannot print to the log pane
- -- we cannot access props["keywordclass.macros.context.en"]
- -- lexer.get_property only handles integers
- -- we cannot run a command to get the location of mult-def.lua
-
- -- local interface = props["keywordclass.macros.context.en"]
- -- local interface = lexer.get_property("keywordclass.macros.context.en","")
-
-]]--
-
-local global, string, table, lpeg = _G, string, table, lpeg
-local P, R, S, V, C, Cmt, Cp, Cc, Ct = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.C, lpeg.Cmt, lpeg.Cp, lpeg.Cc, lpeg.Ct
-local type, next = type, next
-local find, match, lower, upper = string.find, string.match, string.lower, string.upper
-
-local lexer = require("scite-context-lexer")
-local context = lexer.context
-local patterns = context.patterns
-local inform = context.inform
-
-local token = lexer.token
-local exact_match = lexer.exact_match
-
-local contextlexer = lexer.new("tex","scite-context-lexer-tex")
-local whitespace = contextlexer.whitespace
-
-local cldlexer = lexer.load("scite-context-lexer-cld")
-local mpslexer = lexer.load("scite-context-lexer-mps")
-
-local commands = { en = { } }
-local primitives = { }
-local helpers = { }
-local constants = { }
-
-do -- todo: only once, store in global
-
- -- commands helpers primitives
-
- local definitions = context.loaddefinitions("scite-context-data-interfaces")
-
- if definitions then
- local used = { }
- for interface, list in next, definitions do
- if interface ~= "common" then
- used[#used+1] = interface
- local c = { }
- -- these are shared
- local list = definitions.common
- if list then
- for i=1,#list do
- c[list[i]] = true
- end
- end
- -- normally this one is empty
- list = definitions.en
- if list then
- for i=1,#list do
- c[list[i]] = true
- end
- end
- -- these are interface specific
- if interface ~= "en" then
- for i=1,#list do
- c[list[i]] = true
- end
- end
- commands[interface] = c
- end
- end
- table.sort(used)
- inform("context user interfaces '%s' supported",table.concat(used," "))
- end
-
- local definitions = context.loaddefinitions("scite-context-data-context")
- local overloaded = { }
-
- if definitions then
- helpers = definitions.helpers or { }
- constants = definitions.constants or { }
- for i=1,#helpers do
- overloaded[helpers[i]] = true
- end
- for i=1,#constants do
- overloaded[constants[i]] = true
- end
- end
-
- local definitions = context.loaddefinitions("scite-context-data-tex")
-
- if definitions then
- local function add(data,normal)
- for k, v in next, data do
- if v ~= "/" and v ~= "-" then
- if not overloaded[v] then
- primitives[#primitives+1] = v
- end
- if normal then
- v = "normal" .. v
- if not overloaded[v] then
- primitives[#primitives+1] = v
- end
- end
- end
- end
- end
- add(definitions.tex,true)
- add(definitions.etex,true)
- add(definitions.pdftex,true)
- add(definitions.aleph,true)
- add(definitions.omega,true)
- add(definitions.luatex,true)
- add(definitions.xetex,true)
- end
-
-end
-
-local currentcommands = commands.en or { }
-
-local cstoken = R("az","AZ","\127\255") + S("@!?_")
-
-local knowncommand = Cmt(cstoken^1, function(_,i,s)
- return currentcommands[s] and i
-end)
-
-local utfchar = context.utfchar
-local wordtoken = context.patterns.wordtoken
-local iwordtoken = context.patterns.iwordtoken
-local wordpattern = context.patterns.wordpattern
-local iwordpattern = context.patterns.iwordpattern
-local invisibles = context.patterns.invisibles
-local checkedword = context.checkedword
-local styleofword = context.styleofword
-local setwordlist = context.setwordlist
-local validwords = false
-local validminimum = 3
-
--- % language=uk
-
--- fails (empty loop message) ... latest lpeg issue?
-
--- todo: Make sure we only do this at the beginning .. a pitty that we
--- can't store a state .. now is done too often.
-
-local knownpreamble = Cmt(P("% "), function(input,i,_) -- todo : utfbomb, was #P("% ")
- if i < 10 then
- validwords, validminimum = false, 3
- local s, e, word = find(input,"^(.-)[\n\r]",i) -- combine with match
- if word then
- local interface = match(word,"interface=([a-z][a-z]+)")
- if interface and #interface == 2 then
- inform("enabling context user interface '%s'",interface)
- currentcommands = commands[interface] or commands.en or { }
- end
- local language = match(word,"language=([a-z][a-z]+)")
- validwords, validminimum = setwordlist(language)
- end
- end
- return false
-end)
-
--- -- the token list contains { "style", endpos } entries
--- --
--- -- in principle this is faster but it is also crash sensitive for large files
-
--- local constants_hash = { } for i=1,#constants do constants_hash [constants [i]] = true end
--- local helpers_hash = { } for i=1,#helpers do helpers_hash [helpers [i]] = true end
--- local primitives_hash = { } for i=1,#primitives do primitives_hash[primitives[i]] = true end
-
--- local specialword = Ct( P("\\") * Cmt( C(cstoken^1), function(input,i,s)
--- if currentcommands[s] then
--- return true, "command", i
--- elseif constants_hash[s] then
--- return true, "data", i
--- elseif helpers_hash[s] then
--- return true, "plain", i
--- elseif primitives_hash[s] then
--- return true, "primitive", i
--- else -- if starts with if then primitive
--- return true, "user", i
--- end
--- end) )
-
--- local specialword = P("\\") * Cmt( C(cstoken^1), function(input,i,s)
--- if currentcommands[s] then
--- return true, { "command", i }
--- elseif constants_hash[s] then
--- return true, { "data", i }
--- elseif helpers_hash[s] then
--- return true, { "plain", i }
--- elseif primitives_hash[s] then
--- return true, { "primitive", i }
--- else -- if starts with if then primitive
--- return true, { "user", i }
--- end
--- end)
-
--- experiment: keep space with whatever ... less tables
-
--- 10pt
-
-local commentline = P("%") * (1-S("\n\r"))^0
-local endline = S("\n\r")^1
-
-local space = patterns.space -- S(" \n\r\t\f\v")
-local any = patterns.any
-local backslash = P("\\")
-local hspace = S(" \t")
-
-local p_spacing = space^1
-local p_rest = any
-
-local p_preamble = knownpreamble
-local p_comment = commentline
------ p_command = backslash * knowncommand
------ p_constant = backslash * exact_match(constants)
------ p_helper = backslash * exact_match(helpers)
------ p_primitive = backslash * exact_match(primitives)
-
-local p_csdone = #(1-cstoken) + P(-1)
-
-local p_command = backslash * lexer.helpers.utfchartabletopattern(currentcommands) * p_csdone
-local p_constant = backslash * lexer.helpers.utfchartabletopattern(constants) * p_csdone
-local p_helper = backslash * lexer.helpers.utfchartabletopattern(helpers) * p_csdone
-local p_primitive = backslash * lexer.helpers.utfchartabletopattern(primitives) * p_csdone
-
-local p_ifprimitive = P("\\if") * cstoken^1
-local p_csname = backslash * (cstoken^1 + P(1))
-local p_grouping = S("{$}")
-local p_special = S("#()[]<>=\"")
-local p_extra = S("`~%^&_-+/\'|")
-local p_text = iwordtoken^1 --maybe add punctuation and space
-
-local p_reserved = backslash * (
- P("??") + R("az") * P("!")
- ) * cstoken^1
-
-local p_number = context.patterns.real
-local p_unit = P("pt") + P("bp") + P("sp") + P("mm") + P("cm") + P("cc") + P("dd")
-
--- no looking back = #(1-S("[=")) * cstoken^3 * #(1-S("=]"))
-
--- This one gives stack overflows:
---
--- local p_word = Cmt(iwordpattern, function(_,i,s)
--- if validwords then
--- return checkedword(validwords,validminimum,s,i)
--- else
--- -- return true, { "text", i }
--- return true, "text", i
--- end
--- end)
---
--- So we use this one instead:
-
------ p_word = Ct( iwordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() ) -- the function can be inlined
-local p_word = iwordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() -- the function can be inlined
-
------ p_text = (1 - p_grouping - p_special - p_extra - backslash - space + hspace)^1
-
--- keep key pressed at end-of syst-aux.mkiv:
---
--- 0 : 15 sec
--- 1 : 13 sec
--- 2 : 10 sec
---
--- the problem is that quite some style subtables get generated so collapsing ranges helps
-
-local option = 1
-
-if option == 1 then
-
- p_comment = p_comment^1
- p_grouping = p_grouping^1
- p_special = p_special^1
- p_extra = p_extra^1
-
- p_command = p_command^1
- p_constant = p_constant^1
- p_helper = p_helper^1
- p_primitive = p_primitive^1
- p_ifprimitive = p_ifprimitive^1
- p_reserved = p_reserved^1
-
-elseif option == 2 then
-
- local included = space^0
-
- p_comment = (p_comment * included)^1
- p_grouping = (p_grouping * included)^1
- p_special = (p_special * included)^1
- p_extra = (p_extra * included)^1
-
- p_command = (p_command * included)^1
- p_constant = (p_constant * included)^1
- p_helper = (p_helper * included)^1
- p_primitive = (p_primitive * included)^1
- p_ifprimitive = (p_ifprimitive * included)^1
- p_reserved = (p_reserved * included)^1
-
-end
-
-local p_invisible = invisibles^1
-
-local spacing = token(whitespace, p_spacing )
-
-local rest = token("default", p_rest )
-local preamble = token("preamble", p_preamble )
-local comment = token("comment", p_comment )
-local command = token("command", p_command )
-local constant = token("data", p_constant )
-local helper = token("plain", p_helper )
-local primitive = token("primitive", p_primitive )
-local ifprimitive = token("primitive", p_ifprimitive)
-local reserved = token("reserved", p_reserved )
-local csname = token("user", p_csname )
-local grouping = token("grouping", p_grouping )
-local number = token("number", p_number )
- * token("constant", p_unit )
-local special = token("special", p_special )
-local reserved = token("reserved", p_reserved ) -- reserved internal preproc
-local extra = token("extra", p_extra )
-local invisible = token("invisible", p_invisible )
-local text = token("default", p_text )
-local word = p_word
-
------ startluacode = token("grouping", P("\\startluacode"))
------ stopluacode = token("grouping", P("\\stopluacode"))
-
-local luastatus = false
-local luatag = nil
-local lualevel = 0
-
-local function startdisplaylua(_,i,s)
- luatag = s
- luastatus = "display"
- cldlexer._directives.cld_inline = false
- return true
-end
-
-local function stopdisplaylua(_,i,s)
- local ok = luatag == s
- if ok then
- cldlexer._directives.cld_inline = false
- luastatus = false
- end
- return ok
-end
-
-local function startinlinelua(_,i,s)
- if luastatus == "display" then
- return false
- elseif not luastatus then
- luastatus = "inline"
- cldlexer._directives.cld_inline = true
- lualevel = 1
- return true
- else-- if luastatus == "inline" then
- lualevel = lualevel + 1
- return true
- end
-end
-
-local function stopinlinelua_b(_,i,s) -- {
- if luastatus == "display" then
- return false
- elseif luastatus == "inline" then
- lualevel = lualevel + 1 -- ?
- return false
- else
- return true
- end
-end
-
-local function stopinlinelua_e(_,i,s) -- }
- if luastatus == "display" then
- return false
- elseif luastatus == "inline" then
- lualevel = lualevel - 1
- local ok = lualevel <= 0 -- was 0
- if ok then
- cldlexer._directives.cld_inline = false
- luastatus = false
- end
- return ok
- else
- return true
- end
-end
-
-contextlexer._reset_parser = function()
- luastatus = false
- luatag = nil
- lualevel = 0
-end
-
-local luaenvironment = P("lua") * (P("setups") + P("code") + P("parameterset") + P(true))
- + P("ctxfunction") * (P("definition") + P(true))
-
-local inlinelua = P("\\") * (
- P("ctx") * (P("lua") + P("command") + P("late") * (P("lua") + P("command")) + P("function"))
- + P("cld") * (P("command") + P("context"))
- + P("lua") * (P("expr") + P("script") + P("thread"))
- + (P("direct") + P("late")) * P("lua")
- )
-
-local startlua = P("\\start") * Cmt(luaenvironment,startdisplaylua)
- + P("<?lua") * Cmt(P(true),startdisplaylua)
- + inlinelua * space^0 * ( Cmt(P("{"),startinlinelua) )
-
-local stoplua = P("\\stop") * Cmt(luaenvironment,stopdisplaylua)
- + P("?>") * Cmt(P(true),stopdisplaylua)
- + Cmt(P("{"),stopinlinelua_b)
- + Cmt(P("}"),stopinlinelua_e)
-
-local startluacode = token("embedded", startlua)
-local stopluacode = #stoplua * token("embedded", stoplua)
-
-local luacall = P("clf_") * R("az","__","AZ")^1
-
-local metafuncall = ( P("reusable") + P("usable") + P("unique") + P("use") + P("reuse") + P("overlay") ) * ("MPgraphic")
- + P("uniqueMPpagegraphic")
- + P("MPpositiongraphic")
-
-local metafunenvironment = metafuncall -- ( P("use") + P("reusable") + P("unique") ) * ("MPgraphic")
- + P("MP") * ( P("code")+ P("page") + P("inclusions") + P("initializations") + P("definitions") + P("extensions") + P("graphic") + P("calculation") )
-
-local startmetafun = P("\\start") * metafunenvironment
-local stopmetafun = P("\\stop") * metafunenvironment -- todo match start
-
------ subsystem = token("embedded", P("\\xml") * R("az")^1 + (P("\\st") * (P("art") + P("op")) * P("xmlsetups")))
-local subsystemtags = P("xml") + P("btx") -- will be pluggable or maybe even a proper list of valid commands
-local subsystemmacro = P("\\") * (subsystemtags * R("az")^1 + (R("az")-subsystemtags)^1 * subsystemtags * R("az")^1)
-local subsystem = token("embedded", subsystemmacro)
-
-local openargument = token("special", P("{"))
-local closeargument = token("special", P("}"))
-local argumentcontent = token("default",(1-P("}"))^0) -- maybe space needs a treatment
-
-local metafunarguments = (spacing^0 * openargument * argumentcontent * closeargument)^-2
-
-local startmetafuncode = token("embedded", startmetafun) * metafunarguments
-local stopmetafuncode = token("embedded", stopmetafun)
-
-local callers = token("embedded", P("\\") * metafuncall) * metafunarguments
- + token("embedded", P("\\") * luacall)
-
-lexer.embed_lexer(contextlexer, mpslexer, startmetafuncode, stopmetafuncode)
-lexer.embed_lexer(contextlexer, cldlexer, startluacode, stopluacode)
-
--- preamble is inefficient as it probably gets called each time (so some day I really need to
--- patch the plugin)
-
-contextlexer._preamble = preamble
-
-contextlexer._rules = {
- { "whitespace", spacing },
- -- { "preamble", preamble },
- { "word", word },
- { "text", text }, -- non words
- { "comment", comment },
- { "constant", constant },
- -- { "subsystem", subsystem },
- { "callers", callers },
- { "subsystem", subsystem },
- { "ifprimitive", ifprimitive },
- { "helper", helper },
- { "command", command },
- { "primitive", primitive },
- -- { "subsystem", subsystem },
- { "reserved", reserved },
- { "csname", csname },
- -- { "whatever", specialword }, -- not yet, crashes
- { "grouping", grouping },
- -- { "number", number },
- { "special", special },
- { "extra", extra },
- { "invisible", invisible },
- { "rest", rest },
-}
-
--- Watch the text grabber, after all, we're talking mostly of text (beware,
--- no punctuation here as it can be special). We might go for utf here.
-
-local web = lexer.loadluafile("scite-context-lexer-web-snippets")
-
-if web then
-
- lexer.inform("supporting web snippets in tex lexer")
-
- contextlexer._rules_web = {
- { "whitespace", spacing },
- { "text", text }, -- non words
- { "comment", comment },
- { "constant", constant },
- { "callers", callers },
- { "ifprimitive", ifprimitive },
- { "helper", helper },
- { "command", command },
- { "primitive", primitive },
- { "reserved", reserved },
- { "csname", csname },
- { "grouping", grouping },
- { "special", special },
- { "extra", extra },
- { "invisible", invisible },
- { "web", web.pattern },
- { "rest", rest },
- }
-
-else
-
- lexer.report("not supporting web snippets in tex lexer")
-
- contextlexer._rules_web = {
- { "whitespace", spacing },
- { "text", text }, -- non words
- { "comment", comment },
- { "constant", constant },
- { "callers", callers },
- { "ifprimitive", ifprimitive },
- { "helper", helper },
- { "command", command },
- { "primitive", primitive },
- { "reserved", reserved },
- { "csname", csname },
- { "grouping", grouping },
- { "special", special },
- { "extra", extra },
- { "invisible", invisible },
- { "rest", rest },
- }
-
-end
-
-contextlexer._tokenstyles = context.styleset
-
-local environment = {
- ["\\start"] = 1, ["\\stop"] = -1,
- -- ["\\begin"] = 1, ["\\end" ] = -1,
-}
-
--- local block = {
--- ["\\begin"] = 1, ["\\end" ] = -1,
--- }
-
-local group = {
- ["{"] = 1, ["}"] = -1,
-}
-
-contextlexer._foldpattern = P("\\" ) * (P("start") + P("stop")) + S("{}") -- separate entry else interference
-
-contextlexer._foldsymbols = { -- these need to be style references .. todo: multiple styles
- _patterns = {
- "\\start", "\\stop", -- regular environments
- -- "\\begin", "\\end", -- (moveable) blocks
- "[{}]",
- },
- ["command"] = environment,
- ["constant"] = environment,
- ["data"] = environment,
- ["user"] = environment,
- ["embedded"] = environment,
- ["helper"] = environment,
- ["plain"] = environment,
- ["grouping"] = group,
-}
-
--- context.inspect(contextlexer)
-
-return contextlexer
diff --git a/context/data/textadept/context/lexers/scite-context-lexer-txt.lua b/context/data/textadept/context/lexers/scite-context-lexer-txt.lua
deleted file mode 100644
index 8ecfff7cb..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer-txt.lua
+++ /dev/null
@@ -1,80 +0,0 @@
-local info = {
- version = 1.002,
- comment = "scintilla lpeg lexer for plain text (with spell checking)",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-local P, S, Cmt, Cp = lpeg.P, lpeg.S, lpeg.Cmt, lpeg.Cp
-local find, match = string.find, string.match
-
-local lexer = require("scite-context-lexer")
-local context = lexer.context
-local patterns = context.patterns
-
-local token = lexer.token
-
-local textlexer = lexer.new("txt","scite-context-lexer-txt")
-local whitespace = textlexer.whitespace
-
-local space = patterns.space
-local any = patterns.any
-local wordtoken = patterns.wordtoken
-local wordpattern = patterns.wordpattern
-
-local checkedword = context.checkedword
-local styleofword = context.styleofword
-local setwordlist = context.setwordlist
-local validwords = false
-local validminimum = 3
-
--- local styleset = context.newstyleset {
--- "default",
--- "text", "okay", "error", "warning",
--- "preamble",
--- }
-
--- [#!-%] language=uk
-
-local p_preamble = Cmt((S("#!-%") * P(" ")), function(input,i,_) -- todo: utf bomb no longer #
- if i == 1 then -- < 10 then
- validwords, validminimum = false, 3
- local s, e, line = find(input,"^[#!%-%%](.+)[\n\r]",i)
- if line then
- local language = match(line,"language=([a-z]+)")
- if language then
- validwords, validminimum = setwordlist(language)
- end
- end
- end
- return false
-end)
-
-local t_preamble =
- token("preamble", p_preamble)
-
-local t_word =
- wordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() -- the function can be inlined
-
-local t_text =
- token("default", wordtoken^1)
-
-local t_rest =
- token("default", (1-wordtoken-space)^1)
-
-local t_spacing =
- token(whitespace, space^1)
-
-textlexer._rules = {
- { "whitespace", t_spacing },
- { "preamble", t_preamble },
- { "word", t_word }, -- words >= 3
- { "text", t_text }, -- non words
- { "rest", t_rest },
-}
-
-textlexer._LEXBYLINE = true -- new (needs testing, not yet as the system changed in 3.24)
-textlexer._tokenstyles = context.styleset
-
-return textlexer
diff --git a/context/data/textadept/context/lexers/scite-context-lexer-web-snippets.lua b/context/data/textadept/context/lexers/scite-context-lexer-web-snippets.lua
deleted file mode 100644
index 5121030cc..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer-web-snippets.lua
+++ /dev/null
@@ -1,132 +0,0 @@
-local info = {
- version = 1.002,
- comment = "scintilla lpeg lexer for web snippets",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-local P, R, S, C, Cg, Cb, Cs, Cmt, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cg, lpeg.Cb, lpeg.Cs, lpeg.Cmt, lpeg.match
-
-local lexer = require("scite-context-lexer")
-local context = lexer.context
-local patterns = context.patterns
-
-local token = lexer.token
-
-local websnippets = { }
-
-local space = patterns.space -- S(" \n\r\t\f\v")
-local any = patterns.any
-local restofline = patterns.restofline
-local startofline = patterns.startofline
-
-local squote = P("'")
-local dquote = P('"')
-local period = P(".")
-
-local t_whitespace = token(whitespace, space^1)
-local t_spacing = token("default", space^1)
-local t_rest = token("default", any)
-
--- the web subset
-
-local p_beginofweb = P("@")
-local p_endofweb = P("@>")
-
--- @, @/ @| @# @+ @; @[ @]
-
-local p_directive_1 = p_beginofweb * S(",/|#+;[]")
-local t_directive_1 = token("label",p_directive_1)
-
--- @.text @>(monospaced)
--- @:text @>(macro driven)
--- @= verbose@>
--- @! underlined @>
--- @t text @> (hbox)
--- @q ignored @>
-
-local p_typeset = p_beginofweb * S(".:=!tq")
-local t_typeset = token("label",p_typeset) * token("warning",(1-p_endofweb)^1) * token("label",p_endofweb)
-
--- @^index@>
-
-local p_index = p_beginofweb * P("^")
-local t_index = token("label",p_index) * token("function",(1-p_endofweb)^1) * token("label",p_endofweb)
-
--- @f text renderclass
-
-local p_render = p_beginofweb * S("f")
-local t_render = token("label",p_render) * t_spacing * token("warning",(1-space)^1) * t_spacing * token("label",(1-space)^1)
-
--- @s idem
--- @p idem
--- @& strip (spaces before)
--- @h
-
-local p_directive_2 = p_beginofweb * S("sp&h")
-local t_directive_2 = token("label",p_directive_2)
-
--- @< ... @> [=|+=|]
--- @(foo@>
-
-local p_reference = p_beginofweb * S("<(")
-local t_reference = token("label",p_reference) * token("function",(1-p_endofweb)^1) * token("label",p_endofweb * (P("+=") + P("="))^-1)
-
--- @'char' (ascii code)
-
-local p_character = p_beginofweb * squote
-local t_character = token("label",p_character) * token("reserved",(1-squote)^1) * token("label",squote)
-
--- @l nonascii
-
-local p_nonascii = p_beginofweb * S("l")
-local t_nonascii = token("label",p_nonascii) * t_spacing * token("reserved",(1-space)^1)
-
--- @x @y @z changefile
--- @i webfile
-
-local p_filename = p_beginofweb * S("xyzi")
-local t_filename = token("label",p_filename) * t_spacing * token("reserved",(1-space)^1)
-
--- @@ escape
-
-local p_escape = p_beginofweb * p_beginofweb
-local t_escape = token("text",p_escape)
-
--- structure
-
--- @* title.
-
--- local p_section = p_beginofweb * P("*")^1
--- local t_section = token("label",p_section) * t_spacing * token("function",(1-period)^1) * token("label",period)
-
--- @ explanation
-
--- local p_explanation = p_beginofweb
--- local t_explanation = token("label",p_explanation) * t_spacing^1
-
--- @d macro
-
--- local p_macro = p_beginofweb * P("d")
--- local t_macro = token("label",p_macro)
-
--- @c code
-
--- local p_code = p_beginofweb * P("c")
--- local t_code = token("label",p_code)
-
-websnippets.pattern = P (
- t_typeset
- + t_index
- + t_render
- + t_reference
- + t_filename
- + t_directive_1
- + t_directive_2
- + t_character
- + t_nonascii
- + t_escape
-)
-
-return websnippets
diff --git a/context/data/textadept/context/lexers/scite-context-lexer-web.lua b/context/data/textadept/context/lexers/scite-context-lexer-web.lua
deleted file mode 100644
index 81a6f90df..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer-web.lua
+++ /dev/null
@@ -1,67 +0,0 @@
-local info = {
- version = 1.003,
- comment = "scintilla lpeg lexer for web",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-local P, R, S = lpeg.P, lpeg.R, lpeg.S
-
-local lexer = require("scite-context-lexer")
-local context = lexer.context
-local patterns = context.patterns
-
-local token = lexer.token
-local exact_match = lexer.exact_match
-
-local weblexer = lexer.new("web","scite-context-lexer-web")
-local whitespace = weblexer.whitespace
-
-local space = patterns.space -- S(" \n\r\t\f\v")
-local any = patterns.any
-local restofline = patterns.restofline
-local startofline = patterns.startofline
-
-local period = P(".")
-local percent = P("%")
-
-local spacing = token(whitespace, space^1)
-local rest = token("default", any)
-
-local eop = P("@>")
-local eos = eop * P("+")^-1 * P("=")
-
--- we can put some of the next in the web-snippets file
--- is f okay here?
-
-local texcomment = token("comment", percent * restofline^0)
-
-local texpart = token("label",P("@")) * #spacing
- + token("label",P("@") * P("*")^1) * token("function",(1-period)^1) * token("label",period)
-local midpart = token("label",P("@d")) * #spacing
- + token("label",P("@f")) * #spacing
-local cpppart = token("label",P("@c")) * #spacing
- + token("label",P("@p")) * #spacing
- + token("label",P("@") * S("<(")) * token("function",(1-eop)^1) * token("label",eos)
-
-local anypart = P("@") * ( P("*")^1 + S("dfcp") + space^1 + S("<(") * (1-eop)^1 * eos )
-local limbo = 1 - anypart - percent
-
-local texlexer = lexer.load("scite-context-lexer-tex-web")
-local cpplexer = lexer.load("scite-context-lexer-cpp-web")
-
-lexer.embed_lexer(weblexer, texlexer, texpart + limbo, #anypart)
-lexer.embed_lexer(weblexer, cpplexer, cpppart + midpart, #anypart)
-
-local texcomment = token("comment", percent * restofline^0)
-
-weblexer._rules = {
- { "whitespace", spacing },
- { "texcomment", texcomment }, -- else issues with first tex section
- { "rest", rest },
-}
-
-weblexer._tokenstyles = context.styleset
-
-return weblexer
diff --git a/context/data/textadept/context/lexers/scite-context-lexer-xml-cdata.lua b/context/data/textadept/context/lexers/scite-context-lexer-xml-cdata.lua
deleted file mode 100644
index f5ca86cb2..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer-xml-cdata.lua
+++ /dev/null
@@ -1,33 +0,0 @@
-local info = {
- version = 1.002,
- comment = "scintilla lpeg lexer for xml cdata",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-local P = lpeg.P
-
-local lexer = require("scite-context-lexer")
-local context = lexer.context
-local patterns = context.patterns
-
-local token = lexer.token
-
-local xmlcdatalexer = lexer.new("xml-cdata","scite-context-lexer-xml-cdata")
-local whitespace = xmlcdatalexer.whitespace
-
-local space = patterns.space
-local nospace = 1 - space - P("]]>")
-
-local t_spaces = token(whitespace, space ^1)
-local t_cdata = token("comment", nospace^1)
-
-xmlcdatalexer._rules = {
- { "whitespace", t_spaces },
- { "cdata", t_cdata },
-}
-
-xmlcdatalexer._tokenstyles = context.styleset
-
-return xmlcdatalexer
diff --git a/context/data/textadept/context/lexers/scite-context-lexer-xml-comment.lua b/context/data/textadept/context/lexers/scite-context-lexer-xml-comment.lua
deleted file mode 100644
index 40de8f603..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer-xml-comment.lua
+++ /dev/null
@@ -1,33 +0,0 @@
-local info = {
- version = 1.002,
- comment = "scintilla lpeg lexer for xml comments",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-local P = lpeg.P
-
-local lexer = require("scite-context-lexer")
-local context = lexer.context
-local patterns = context.patterns
-
-local token = lexer.token
-
-local xmlcommentlexer = lexer.new("xml-comment","scite-context-lexer-xml-comment")
-local whitespace = xmlcommentlexer.whitespace
-
-local space = patterns.space
-local nospace = 1 - space - P("-->")
-
-local t_spaces = token(whitespace, space ^1)
-local t_comment = token("comment", nospace^1)
-
-xmlcommentlexer._rules = {
- { "whitespace", t_spaces },
- { "comment", t_comment },
-}
-
-xmlcommentlexer._tokenstyles = context.styleset
-
-return xmlcommentlexer
diff --git a/context/data/textadept/context/lexers/scite-context-lexer-xml-script.lua b/context/data/textadept/context/lexers/scite-context-lexer-xml-script.lua
deleted file mode 100644
index a1b717a6a..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer-xml-script.lua
+++ /dev/null
@@ -1,33 +0,0 @@
-local info = {
- version = 1.002,
- comment = "scintilla lpeg lexer for xml script",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-local P = lpeg.P
-
-local lexer = require("scite-context-lexer")
-local context = lexer.context
-local patterns = context.patterns
-
-local token = lexer.token
-
-local xmlscriptlexer = lexer.new("xml-script","scite-context-lexer-xml-script")
-local whitespace = xmlscriptlexer.whitespace
-
-local space = patterns.space
-local nospace = 1 - space - (P("</") * P("script") + P("SCRIPT")) * P(">")
-
-local t_spaces = token(whitespace, space ^1)
-local t_script = token("default", nospace^1)
-
-xmlscriptlexer._rules = {
- { "whitespace", t_spaces },
- { "script", t_script },
-}
-
-xmlscriptlexer._tokenstyles = context.styleset
-
-return xmlscriptlexer
diff --git a/context/data/textadept/context/lexers/scite-context-lexer-xml.lua b/context/data/textadept/context/lexers/scite-context-lexer-xml.lua
deleted file mode 100644
index bbdb3febc..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer-xml.lua
+++ /dev/null
@@ -1,350 +0,0 @@
-local info = {
- version = 1.002,
- comment = "scintilla lpeg lexer for xml",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- adapted from the regular context pretty printer code (after all, lexing
--- boils down to much of the same and there are only so many ways to do
--- things). Simplified a bit as we have a different nesting model.
-
--- todo: parse entities in attributes
-
-local global, string, table, lpeg = _G, string, table, lpeg
-local P, R, S, C, Cmt, Cp = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cmt, lpeg.Cp
-local type = type
-local match, find = string.match, string.find
-
-local lexer = require("scite-context-lexer")
-local context = lexer.context
-local patterns = context.patterns
-
-local token = lexer.token
-local exact_match = lexer.exact_match
-
-local xmllexer = lexer.new("xml","scite-context-lexer-xml")
-local whitespace = xmllexer.whitespace
-
-local xmlcommentlexer = lexer.load("scite-context-lexer-xml-comment")
-local xmlcdatalexer = lexer.load("scite-context-lexer-xml-cdata")
-local xmlscriptlexer = lexer.load("scite-context-lexer-xml-script")
-local lualexer = lexer.load("scite-context-lexer-lua")
-
-local space = patterns.space
-local any = patterns.any
-
-local dquote = P('"')
-local squote = P("'")
-local colon = P(":")
-local semicolon = P(";")
-local equal = P("=")
-local ampersand = P("&")
-
-local name = (R("az","AZ","09") + S("_-."))^1
-local openbegin = P("<")
-local openend = P("</")
-local closebegin = P("/>") + P(">")
-local closeend = P(">")
-local opencomment = P("<!--")
-local closecomment = P("-->")
-local openinstruction = P("<?")
-local closeinstruction = P("?>")
-local opencdata = P("<![CDATA[")
-local closecdata = P("]]>")
-local opendoctype = P("<!DOCTYPE") -- could grab the whole doctype
-local closedoctype = P("]>") + P(">")
-local openscript = openbegin * (P("script") + P("SCRIPT")) * (1-closeend)^0 * closeend -- begin
-local closescript = openend * (P("script") + P("SCRIPT")) * closeend
-
-local openlua = "<?lua"
-local closelua = "?>"
-
--- <!DOCTYPE Something PUBLIC "... ..." "..." [ ... ] >
--- <!DOCTYPE Something PUBLIC "... ..." "..." >
--- <!DOCTYPE Something SYSTEM "... ..." [ ... ] >
--- <!DOCTYPE Something SYSTEM "... ..." >
--- <!DOCTYPE Something [ ... ] >
--- <!DOCTYPE Something >
-
-local entity = ampersand * (1-semicolon)^1 * semicolon
-
-local utfchar = context.utfchar
-local wordtoken = context.patterns.wordtoken
-local iwordtoken = context.patterns.iwordtoken
-local wordpattern = context.patterns.wordpattern
-local iwordpattern = context.patterns.iwordpattern
-local invisibles = context.patterns.invisibles
-local checkedword = context.checkedword
-local styleofword = context.styleofword
-local setwordlist = context.setwordlist
-local validwords = false
-local validminimum = 3
-
--- <?xml version="1.0" encoding="UTF-8" language="uk" ?>
---
--- <?context-directive editor language us ?>
-
-local t_preamble = Cmt(P("<?xml "), function(input,i,_) -- todo: utf bomb, no longer #
- if i < 200 then
- validwords, validminimum = false, 3
- local language = match(input,"^<%?xml[^>]*%?>%s*<%?context%-directive%s+editor%s+language%s+(..)%s+%?>")
- -- if not language then
- -- language = match(input,"^<%?xml[^>]*language=[\"\'](..)[\"\'][^>]*%?>",i)
- -- end
- if language then
- validwords, validminimum = setwordlist(language)
- end
- end
- return false
-end)
-
-local t_word =
--- Ct( iwordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() ) -- the function can be inlined
- iwordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() -- the function can be inlined
-
-local t_rest =
- token("default", any)
-
-local t_text =
- token("default", (1-S("<>&")-space)^1)
-
-local t_spacing =
- token(whitespace, space^1)
-
-local t_optionalwhitespace =
- token("default", space^1)^0
-
-local t_localspacing =
- token("default", space^1)
-
--- Because we want a differently colored open and close we need an embedded lexer (whitespace
--- trigger). What is actually needed is that scintilla applies the current whitespace style.
--- Even using different style keys is not robust as they can be shared. I'll fix the main
--- lexer code.
-
-local t_sstring =
- token("quote",dquote)
- * token("string",(1-dquote)^0) -- different from context
- * token("quote",dquote)
-
-local t_dstring =
- token("quote",squote)
- * token("string",(1-squote)^0) -- different from context
- * token("quote",squote)
-
--- local t_comment =
--- token("command",opencomment)
--- * token("comment",(1-closecomment)^0) -- different from context
--- * token("command",closecomment)
-
--- local t_cdata =
--- token("command",opencdata)
--- * token("comment",(1-closecdata)^0) -- different from context
--- * token("command",closecdata)
-
--- maybe cdata just text (then we don't need the extra lexer as we only have one comment then)
-
--- <!DOCTYPE Something PUBLIC "... ..." "..." [ ... ] >
--- <!DOCTYPE Something PUBLIC "... ..." "..." >
--- <!DOCTYPE Something SYSTEM "... ..." [ ... ] >
--- <!DOCTYPE Something SYSTEM "... ..." >
--- <!DOCTYPE Something [ ... ] >
--- <!DOCTYPE Something >
-
--- <!ENTITY xxxx SYSTEM "yyyy" NDATA zzzz>
--- <!ENTITY xxxx PUBLIC "yyyy" >
--- <!ENTITY xxxx "yyyy" >
-
-local t_docstr = t_dstring + t_sstring
-
-local t_docent = token("command",P("<!ENTITY"))
- * t_optionalwhitespace
- * token("keyword",name)
- * t_optionalwhitespace
- * (
- (
- token("constant",P("SYSTEM"))
- * t_optionalwhitespace
- * t_docstr
- * t_optionalwhitespace
- * token("constant",P("NDATA"))
- * t_optionalwhitespace
- * token("keyword",name)
- ) + (
- token("constant",P("PUBLIC"))
- * t_optionalwhitespace
- * t_docstr
- ) + (
- t_docstr
- )
- )
- * t_optionalwhitespace
- * token("command",P(">"))
-
-local t_docele = token("command",P("<!ELEMENT"))
- * t_optionalwhitespace
- * token("keyword",name)
- * t_optionalwhitespace
- * token("command",P("("))
- * (
- t_localspacing
- + token("constant",P("#CDATA") + P("#PCDATA") + P("ANY"))
- + token("text",P(","))
- + token("comment",(1-S(",)"))^1)
- )^1
- * token("command",P(")"))
- * t_optionalwhitespace
- * token("command",P(">"))
-
-local t_docset = token("command",P("["))
- * t_optionalwhitespace
- * ((t_optionalwhitespace * (t_docent + t_docele))^1 + token("comment",(1-P("]"))^0))
- * t_optionalwhitespace
- * token("command",P("]"))
-
-local t_doctype = token("command",P("<!DOCTYPE"))
- * t_optionalwhitespace
- * token("keyword",name)
- * t_optionalwhitespace
- * (
- (
- token("constant",P("PUBLIC"))
- * t_optionalwhitespace
- * t_docstr
- * t_optionalwhitespace
- * t_docstr
- * t_optionalwhitespace
- ) + (
- token("constant",P("SYSTEM"))
- * t_optionalwhitespace
- * t_docstr
- * t_optionalwhitespace
- )
- )^-1
- * t_docset^-1
- * t_optionalwhitespace
- * token("command",P(">"))
-
-lexer.embed_lexer(xmllexer, lualexer, token("command", openlua), token("command", closelua))
-lexer.embed_lexer(xmllexer, xmlcommentlexer, token("command", opencomment), token("command", closecomment))
-lexer.embed_lexer(xmllexer, xmlcdatalexer, token("command", opencdata), token("command", closecdata))
-lexer.embed_lexer(xmllexer, xmlscriptlexer, token("command", openscript), token("command", closescript))
-
--- local t_name =
--- token("plain",name)
--- * (
--- token("default",colon)
--- * token("keyword",name)
--- )
--- + token("keyword",name)
-
-local t_name = -- more robust
- token("plain",name * colon)^-1
- * token("keyword",name)
-
--- local t_key =
--- token("plain",name)
--- * (
--- token("default",colon)
--- * token("constant",name)
--- )
--- + token("constant",name)
-
-local t_key =
- token("plain",name * colon)^-1
- * token("constant",name)
-
-local t_attributes = (
- t_optionalwhitespace
- * t_key
- * t_optionalwhitespace
- * token("plain",equal)
- * t_optionalwhitespace
- * (t_dstring + t_sstring)
- * t_optionalwhitespace
-)^0
-
-local t_open =
- token("keyword",openbegin)
- * (
- t_name
- * t_optionalwhitespace
- * t_attributes
- * token("keyword",closebegin)
- +
- token("error",(1-closebegin)^1)
- )
-
-local t_close =
- token("keyword",openend)
- * (
- t_name
- * t_optionalwhitespace
- * token("keyword",closeend)
- +
- token("error",(1-closeend)^1)
- )
-
-local t_entity =
- token("constant",entity)
-
-local t_instruction =
- token("command",openinstruction * P("xml"))
- * t_optionalwhitespace
- * t_attributes
- * t_optionalwhitespace
- * token("command",closeinstruction)
- + token("command",openinstruction * name)
- * token("default",(1-closeinstruction)^1)
- * token("command",closeinstruction)
-
-local t_invisible =
- token("invisible",invisibles^1)
-
--- local t_preamble =
--- token("preamble", t_preamble )
-
-xmllexer._rules = {
- { "whitespace", t_spacing },
- { "preamble", t_preamble },
- { "word", t_word },
- -- { "text", t_text },
- -- { "comment", t_comment },
- -- { "cdata", t_cdata },
- { "doctype", t_doctype },
- { "instruction", t_instruction },
- { "close", t_close },
- { "open", t_open },
- { "entity", t_entity },
- { "invisible", t_invisible },
- { "rest", t_rest },
-}
-
-xmllexer._tokenstyles = context.styleset
-
-xmllexer._foldpattern = P("</") + P("<") + P("/>") -- separate entry else interference
-+ P("<!--") + P("-->")
-
-xmllexer._foldsymbols = {
- _patterns = {
- "</",
- "/>",
- "<",
- },
- ["keyword"] = {
- ["</"] = -1,
- ["/>"] = -1,
- ["<"] = 1,
- },
- ["command"] = {
- ["</"] = -1,
- ["/>"] = -1,
- ["<!--"] = 1,
- ["-->"] = -1,
- ["<"] = 1,
- },
-}
-
-return xmllexer
diff --git a/context/data/textadept/context/lexers/scite-context-lexer.lua b/context/data/textadept/context/lexers/scite-context-lexer.lua
deleted file mode 100644
index 289697b72..000000000
--- a/context/data/textadept/context/lexers/scite-context-lexer.lua
+++ /dev/null
@@ -1,2686 +0,0 @@
-local info = {
- version = 1.400,
- comment = "basics for scintilla lpeg lexer for context/metafun",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
- comment = "contains copyrighted code from mitchell.att.foicica.com",
-
-}
-
--- We need a copy of this file to lexer.lua in the same path. This was not needed
--- before version 10 but I can't figure out what else to do. It looks like there
--- is some loading of lexer.lua but I can't see where.
-
--- For a while it looked like we're stuck with scite 3 because there would be no
--- update of scintillua for the newer versions (c++ changes) but now it looks that
--- there will be updates (2021). There is a dll for scite >= 5 but it doesn't
--- work (yet). In version 5.20+ the scintillua dll makes scite crash (alsl when I
--- use the recommended import). In an early 5.02 loading the (shipped) lpeg lexer
--- does nothing at all. There have been changes in the lua interface too but I need
--- to compare the old and new lib. For now I gave up and got back to version 3+. It
--- would be nice if error messages would go to the log pane so that wget an idea
--- what happens. After all the code involved (below) is not that much and not that
--- complex either.
---
--- Actually, scite 5.22 also crashed when a program was launched so better wait
--- for a while. (In the worst case, when it all stops working, we need to migrate
--- to visual code, which is out backup/fallback plan.) I didn't test if the latest
--- textadept still works with our lexer variant. In the meantime that editor has
--- grown to some 30 MB so it is no longer a lightweight option (scite with scintilla
--- is still quite small).
-
-if lpeg.setmaxstack then lpeg.setmaxstack(1000) end
-
-local log = false
-local trace = false
-local detail = false
-local show = false -- nice for tracing (also for later)
-local collapse = false -- can save some 15% (maybe easier on scintilla)
-local inspect = false -- can save some 15% (maybe easier on scintilla)
-
--- local log = true
--- local trace = true
-
--- GET GOING
---
--- You need to copy this file over lexer.lua. In principle other lexers could work
--- too but not now. Maybe some day. All patterns will move into the patterns name
--- space. I might do the same with styles. If you run an older version of SciTE you
--- can take one of the archives. Pre 3.41 versions can just be copied to the right
--- path, as there we still use part of the normal lexer. Below we mention some
--- issues with different versions of SciTE. We try to keep up with changes but best
--- check careful if the version that yuou install works as expected because SciTE
--- and the scintillua dll need to be in sync.
---
--- REMARK
---
--- We started using lpeg lexing as soon as it came available. Because we had rather
--- demanding files and also wanted to use nested lexers, we ended up with our own
--- variant. At least at that time this was more robust and also much faster (as we
--- have some pretty large Lua data files and also work with large xml files). As a
--- consequence successive versions had to be adapted to changes in the (at that time
--- still unstable) api. In addition to lexing we also have spell checking and such.
--- Around version 3.60 things became more stable so I don't expect to change much.
---
--- LEXING
---
--- When pc's showed up we wrote our own editor (texedit) in MODULA 2. It was fast,
--- had multiple overlapping (text) windows, could run in the at most 1M memory at
--- that time, etc. The realtime file browsing with lexing that we had at that time
--- is still on my current wish list. The color scheme and logic that we used related
--- to the logic behind the ConTeXt user interface that evolved.
---
--- Later I rewrote the editor in perl/tk. I don't like the perl syntax but tk
--- widgets are very powerful and hard to beat. In fact, TextAdept reminds me of
--- that: wrap your own interface around a framework (tk had an edit control that one
--- could control completely not that different from scintilla). Last time I checked
--- it still ran fine so I might try to implement something like its file handling in
--- TextAdept.
---
--- In the end I settled for SciTE for which I wrote TeX and MetaPost lexers that
--- could handle keyword sets. With respect to lexing (syntax highlighting) ConTeXt
--- has a long history, if only because we need it for manuals. Anyway, in the end we
--- arrived at lpeg based lexing (which is quite natural as we have lots of lpeg
--- usage in ConTeXt). The basic color schemes haven't changed much. The most
--- prominent differences are the nested lexers.
---
--- In the meantime I made the lexer suitable for typesetting sources which was no
--- big deal as we already had that in place (ConTeXt used lpeg from the day it
--- showed up so we have several lexing options there too).
---
--- Keep in mind that in ConTeXt (typesetting) lexing can follow several approaches:
--- line based (which is handy for verbatim mode), syntax mode (which is nice for
--- tutorials), and tolerant mode (so that one can also show bad examples or errors).
--- These demands can clash.
---
--- HISTORY
---
--- The remarks below are more for myself so that I keep track of changes in the
--- way we adapt to the changes in the scintillua and scite.
---
--- The fold and lex functions are copied and patched from original code by Mitchell
--- (see lexer.lua) in the scintillua distribution. So whatever I say below, assume
--- that all errors are mine. The ability to use lpeg in scintilla is a real nice
--- addition and a brilliant move. The code is a byproduct of the (mainly Lua based)
--- TextAdept which at the time I ran into it was a rapidly moving target so I
--- decided to stick ot SciTE. When I played with it, it had no realtime output pane
--- although that seems to be dealt with now (2017). I need to have a look at it in
--- more detail but a first test again made the output hang and it was a bit slow too
--- (and I also want the log pane as SciTE has it, on the right, in view). So, for
--- now I stick to SciTE even when it's somewhat crippled by the fact that we cannot
--- hook our own (language dependent) lexer into the output pane (somehow the
--- errorlist lexer is hard coded into the editor). Hopefully that will change some
--- day. The ConTeXt distribution has cmd runner for textdept that will plug in the
--- lexers discussed here as well as a dedicated runner. Considere it an experiment.
---
--- The basic code hasn't changed much but we had to adapt a few times to changes in
--- the api and/or work around bugs. Starting with SciTE version 3.20 there was an
--- issue with coloring. We still lacked a connection with SciTE itself (properties
--- as well as printing to the log pane) and we could not trace this (on windows).
--- However on unix we can see messages! As far as I can see, there are no
--- fundamental changes in lexer.lua or LexLPeg.cxx so it must be/have been in
--- Scintilla itself. So we went back to 3.10. Indicators of issues are: no lexing of
--- 'next' and 'goto <label>' in the Lua lexer and no brace highlighting either.
--- Interesting is that it does work ok in the cld lexer (so the Lua code is okay).
--- All seems to be ok again in later versions, so, when you update best check first
--- and just switch back to an older version as normally a SciTE update is not
--- critital. When char-def.lua lexes real fast this is a signal that the lexer quits
--- somewhere halfway. Maybe there are some hard coded limitations on the amount of
--- styles and/or length of names.
---
--- Anyway, after checking 3.24 and adapting to the new lexer tables things are okay
--- again. So, this version assumes 3.24 or higher. In 3.24 we have a different token
--- result, i.e. no longer a { tag, pattern } but just two return values. I didn't
--- check other changes but will do that when I run into issues. I had already
--- optimized these small tables by hashing which was much more efficient (and maybe
--- even more efficient than the current approach) but this is no longer needed. For
--- the moment we keep some of that code around as I don't know what happens in
--- future versions. I'm anyway still happy with this kind of lexing.
---
--- In 3.31 another major change took place: some helper constants (maybe they're no
--- longer constants) and functions were moved into the lexer modules namespace but
--- the functions are assigned to the Lua module afterward so we cannot alias them
--- beforehand. We're probably getting close to a stable interface now. At that time
--- for the first time I considered making a whole copy and patch the other functions
--- too as we need an extra nesting model. However, I don't want to maintain too
--- much. An unfortunate change in 3.03 is that no longer a script can be specified.
--- This means that instead of loading the extensions via the properties file, we now
--- need to load them in our own lexers, unless of course we replace lexer.lua
--- completely (which adds another installation issue).
---
--- Another change has been that _LEXERHOME is no longer available. It looks like
--- more and more functionality gets dropped so maybe at some point we need to ship
--- our own dll/so files. For instance, I'd like to have access to the current
--- filename and other SciTE properties. We could then cache some info with each
--- file, if only we had knowledge of what file we're dealing with. This all makes a
--- nice installation more complex and (worse) makes it hard to share files between
--- different editors usign s similar directory structure.
---
--- For huge files folding can be pretty slow and I do have some large ones that I
--- keep open all the time. Loading is normally no ussue, unless one has remembered
--- the status and the cursor is at the last line of a 200K line file. Optimizing the
--- fold function brought down loading of char-def.lua from 14 sec => 8 sec.
--- Replacing the word_match function and optimizing the lex function gained another
--- 2+ seconds. A 6 second load is quite ok for me. The changed lexer table structure
--- (no subtables) brings loading down to a few seconds.
---
--- When the lexer path is copied to the TextAdept lexer path, and the theme
--- definition to theme path (as lexer.lua), the lexer works there as well. Although
--- ... when I decided to check the state of TextAdept I had to adapt some loader
--- code. The solution is not pretty but works and also permits overloading. When I
--- have time and motive I will make a proper setup file to tune the look and feel a
--- bit more than we do now. The TextAdept editor nwo has tabs and a console so it
--- has become more useable for me (it's still somewhat slower than SciTE).
--- Interesting is that the jit version of TextAdept crashes on lexing large files
--- (and does not feel faster either; maybe a side effect of known limitations as we
--- know that Luajit is more limited than stock Lua).
---
--- Function load(lexer_name) starts with _lexers.WHITESPACE = lexer_name ..
--- '_whitespace' which means that we need to have it frozen at the moment we load
--- another lexer. Because spacing is used to revert to a parent lexer we need to
--- make sure that we load children as late as possible in order not to get the wrong
--- whitespace trigger. This took me quite a while to figure out (not being that
--- familiar with the internals). The lex and fold functions have been optimized. It
--- is a pitty that there is no proper print available. Another thing needed is a
--- default style in our own theme style definition, as otherwise we get wrong nested
--- lexers, especially if they are larger than a view. This is the hardest part of
--- getting things right.
---
--- It's a pitty that there is no scintillua library for the OSX version of SciTE.
--- Even better would be to have the scintillua library as integral part of SciTE as
--- that way I could use OSX alongside windows and linux (depending on needs). Also
--- nice would be to have a proper interface to SciTE then because currently the
--- lexer is rather isolated and the Lua version does not provide all standard
--- libraries. It would also be good to have lpeg support in the regular SciTE Lua
--- extension (currently you need to pick it up from someplace else). I keep hoping.
---
--- With 3.41 the interface changed again so it became time to look into the C++ code
--- and consider compiling and patching myself, something that I like to avoid.
--- Loading is more complicated now as the lexer gets loaded automatically so we have
--- little control over extending the code now. After a few days trying all kind of
--- solutions I decided to follow a different approach: drop in a complete
--- replacement. This of course means that I need to keep track of even more changes
--- (which for sure will happen) but at least I get rid of interferences. Till 3.60
--- the api (lexing and configuration) was simply too unstable across versions which
--- is a pitty because we expect authors to install SciTE without hassle. Maybe in a
--- few years things will have stabelized. Maybe it's also not really expected that
--- one writes lexers at all. A side effect is that I now no longer will use shipped
--- lexers for languages that I made no lexer for, but just the built-in ones in
--- addition to the ConTeXt lpeg lexers. Not that it matters much as the ConTeXt
--- lexers cover what I need (and I can always write more). For editing TeX files one
--- only needs a limited set of lexers (TeX, MetaPost, Lua, BibTeX, C/W, PDF, SQL,
--- etc). I can add more when I want.
---
--- In fact, the transition to 3.41 was triggered by an unfateful update of Ubuntu
--- which left me with an incompatible SciTE and lexer library and updating was not
--- possible due to the lack of 64 bit libraries. We'll see what the future brings.
--- For now I can use SciTE under wine on linux. The fact that scintillua ships
--- independently is a showstopper.
---
--- Promissing is that the library now can use another Lua instance so maybe some day
--- it will get properly in SciTE and we can use more clever scripting.
---
--- In some lexers we use embedded ones even if we could do it directly, The reason
--- is that when the end token is edited (e.g. -->), backtracking to the space before
--- the begin token (e.g. <!--) results in applying the surrounding whitespace which
--- in turn means that when the end token is edited right, backtracking doesn't go
--- back. One solution (in the dll) would be to backtrack several space categories.
--- After all, lexing is quite fast (applying the result is much slower).
---
--- For some reason the first blob of text tends to go wrong (pdf and web). It would
--- be nice to have 'whole doc' initial lexing. Quite fishy as it makes it impossible
--- to lex the first part well (for already opened documents) because only a partial
--- text is passed.
---
--- So, maybe I should just write this from scratch (assuming more generic usage)
--- because after all, the dll expects just tables, based on a string. I can then
--- also do some more aggressive resource sharing (needed when used generic).
---
--- I think that nested lexers are still bugged (esp over longer ranges). It never
--- was robust or maybe it's simply not meant for too complex cases (well, it
--- probably *is* tricky material). The 3.24 version was probably the best so far.
--- The fact that styles bleed between lexers even if their states are isolated is an
--- issue. Another issus is that zero characters in the text passed to the lexer can
--- mess things up (pdf files have them in streams).
---
--- For more complex 'languages', like web or xml, we need to make sure that we use
--- e.g. 'default' for spacing that makes up some construct. Ok, we then still have a
--- backtracking issue but less.
---
--- Good news for some ConTeXt users: there is now a scintillua plugin for notepad++
--- and we ship an ini file for that editor with some installation instructions
--- embedded. Also, TextAdept has a console so that we can run realtime. The spawner
--- is still not perfect (sometimes hangs) but it was enough reason to spend time on
--- making our lexer work with TextAdept and create a setup.
---
--- Some bad news. The interface changed (again) in textadept 10, some for the better
--- (but a bit different from what happens here) and some for the worse, especially
--- moving some code to the init file so we now need some bad hacks. I decided to
--- stay with the old method of defining lexers and because the lexer cannot be run
--- in parallel any more (some change in the binary?) I will probably also cleanup
--- code below as we no longer need to be compatible. Unfortunately textadept is too
--- much a moving target to simply kick in some (tex related) production flow (apart
--- from the fact that it doesn't yet have the scite like realtime console). I'll
--- keep an eye on it. Because we don't need many added features I might as well decide
--- to make a lean and mean instance (after all the license permits forking).
-
--- TRACING
---
--- The advantage is that we now can check more easily with regular Lua(TeX). We can
--- also use wine and print to the console (somehow stdout is intercepted there.) So,
--- I've added a bit of tracing. Interesting is to notice that each document gets its
--- own instance which has advantages but also means that when we are spellchecking
--- we reload the word lists each time. (In the past I assumed a shared instance and
--- took some precautions. But I can fix this.)
---
--- TODO
---
--- It would be nice if we could load some ConTeXt Lua modules (the basic set) and
--- then use resolvers and such. But it might not work well with scite.
---
--- The current lexer basics are still a mix between old and new. Maybe I should redo
--- some more. This is probably easier in TextAdept than in SciTE.
---
--- We have to make sure we don't overload ConTeXt definitions when this code is used
--- in ConTeXt. I still have to add some of the goodies that we have there in lexers
--- into these.
---
--- Maybe I should use a special stripped on the one hand and extended version of the
--- dll (stable api) and at least add a bit more interfacing to scintilla.
---
--- I need to investigate if we can use the already built in Lua instance so that we
--- can combine the power of lexing with extensions.
---
--- I need to play with hotspot and other properties like indicators (whatever they
--- are).
---
--- I want to get rid of these lexers.STYLE_XX and lexers.XX things. This is possible
--- when we give up compatibility. Generalize the helpers that I wrote for SciTE so
--- that they also can be used TextAdept.
---
--- I can make an export to ConTeXt, but first I'll redo the code that makes the
--- grammar, as we only seem to need
---
--- lexer._TOKENSTYLES : table
--- lexer._CHILDREN : flag
--- lexer._EXTRASTYLES : table
--- lexer._GRAMMAR : flag
---
--- lexers.load : function
--- lexers.lex : function
---
--- So, if we drop compatibility with other lex definitions, we can make things
--- simpler. However, in the meantime one can just do this:
---
--- context --extra=listing --scite [--compact --verycompact] somefile.tex
---
--- and get a printable document. So, this todo is a bit obsolete.
---
--- Properties is an ugly mess ... due to chages in the interface we're now left
--- with some hybrid that sort of works ok
-
--- textadept: buffer:colourise(0,-1)
-
-local lpeg = require("lpeg")
-
-local global = _G
-local find, gmatch, match, lower, upper, gsub, sub, format, byte = string.find, string.gmatch, string.match, string.lower, string.upper, string.gsub, string.sub, string.format, string.byte
-local concat, sort = table.concat, table.sort
-local type, next, setmetatable, rawset, tonumber, tostring = type, next, setmetatable, rawset, tonumber, tostring
-local R, P, S, V, C, Cp, Cs, Ct, Cmt, Cc, Cf, Cg, Carg = lpeg.R, lpeg.P, lpeg.S, lpeg.V, lpeg.C, lpeg.Cp, lpeg.Cs, lpeg.Ct, lpeg.Cmt, lpeg.Cc, lpeg.Cf, lpeg.Cg, lpeg.Carg
-local lpegmatch = lpeg.match
-
-local usage = (textadept and "textadept") or (resolvers and "context") or "scite"
-local nesting = 0
-local output = nil
-
------ print = textadept and ui and ui.print or print -- crashes when ui is not yet defined
-
-local function print(...)
- if not output then
- output = io.open("lexer.log","w")
- end
- output:write(...,"\n")
- output:flush()
-end
-
-local function report(fmt,str,...)
- if log then
- if str then
- fmt = format(fmt,str,...)
- end
- print(format("scite lpeg lexer > %s > %s",nesting == 0 and "-" or nesting,fmt))
- end
-end
-
-local function inform(...)
- if log and trace then
- report(...)
- end
-end
-
-inform("loading context lexer module (global table: %s)",tostring(global))
-
-do
-
- local floor = math and math.floor
- local format = format
- local tonumber = tonumber
-
- if not floor then
-
- if tonumber(string.match(_VERSION,"%d%.%d")) < 5.3 then
- floor = function(n)
- return tonumber(format("%d",n))
- end
- else
- -- 5.3 has a mixed number system and format %d doesn't work with
- -- floats any longer ... no fun
- floor = function(n)
- return (n - n % 1)
- end
- end
-
- math = math or { }
-
- math.floor = floor
-
- end
-
-end
-
-local floor = math.floor
-
-if not package.searchpath then
-
- -- Unfortunately the io library is only available when we end up
- -- in this branch of code.
-
- inform("using adapted function 'package.searchpath' (if used at all)")
-
- function package.searchpath(name,path)
- local tried = { }
- for part in gmatch(path,"[^;]+") do
- local filename = gsub(part,"%?",name)
- local f = io.open(filename,"r")
- if f then
- inform("file found on path: %s",filename)
- f:close()
- return filename
- end
- tried[#tried + 1] = format("no file '%s'",filename)
- end
- -- added: local path .. for testing
- local f = io.open(filename,"r")
- if f then
- inform("file found on current path: %s",filename)
- f:close()
- return filename
- end
- --
- tried[#tried + 1] = format("no file '%s'",filename)
- return nil, concat(tried,"\n")
- end
-
-end
-
-local lexers = { }
-local context = { }
-local helpers = { }
-lexers.context = context
-lexers.helpers = helpers
-
-local patterns = { }
-context.patterns = patterns -- todo: lexers.patterns
-
-context.report = report
-context.inform = inform
-
-lexers.LEXERPATH = package.path -- can be multiple paths separated by ;
-
-if resolvers then
- -- todo: set LEXERPATH
- -- todo: set report
-end
-
-local function sortedkeys(hash) -- simple version, good enough for here
- local t, n = { }, 0
- for k, v in next, hash do
- t[#t+1] = k
- local l = #tostring(k)
- if l > n then
- n = l
- end
- end
- sort(t)
- return t, n
-end
-
-helpers.sortedkeys = sortedkeys
-
-local usedlexers = { }
-local parent_lexer = nil
-
--- The problem with styles is that there is some nasty interaction with scintilla
--- and each version of lexer dll/so has a different issue. So, from now on we will
--- just add them here. There is also a limit on some 30 styles. Maybe I should
--- hash them in order to reuse.
-
--- todo: work with proper hashes and analyze what styles are really used by a
--- lexer
-
-local default = {
- "nothing", "whitespace", "comment", "string", "number", "keyword",
- "identifier", "operator", "error", "preprocessor", "constant", "variable",
- "function", "type", "label", "embedded",
- "quote", "special", "extra", "reserved", "okay", "warning",
- "command", "internal", "preamble", "grouping", "primitive", "plain",
- "user",
- -- not used (yet) .. we cross the 32 boundary so had to patch the initializer, see (1)
- "char", "class", "data", "definition", "invisible", "regex",
- "standout", "tag",
- "text",
-}
-
-local predefined = {
- "default", "linenumber", "bracelight", "bracebad", "controlchar",
- "indentguide", "calltip",
- -- seems new
- "folddisplaytext"
-}
-
--- Bah ... ugly ... nicer would be a proper hash .. we now have properties
--- as well as STYLE_* and some connection between them ... why .. ok, we
--- could delay things but who cares. Anyway, at this moment the properties
--- are still unknown.
-
-local function preparestyles(list)
- local reverse = { }
- for i=1,#list do
- local k = list[i]
- local K = upper(k)
- local s = "style." .. k
- lexers[K] = k -- is this used
- lexers["STYLE_"..K] = "$(" .. k .. ")"
- reverse[k] = true
- end
- return reverse
-end
-
-local defaultstyles = preparestyles(default)
-local predefinedstyles = preparestyles(predefined)
-
--- These helpers are set afterwards so we delay their initialization ... there
--- is no need to alias each time again and this way we can more easily adapt
--- to updates.
-
--- These keep changing (values, functions, tables ...) so we nee to check these
--- with each update. Some of them are set in the loader (the require 'lexer' is
--- in fact not a real one as the lexer code is loaded in the dll). It's also not
--- getting more efficient.
-
--- FOLD_BASE = lexers.FOLD_BASE or SC_FOLDLEVELBASE
--- FOLD_HEADER = lexers.FOLD_HEADER or SC_FOLDLEVELHEADERFLAG
--- FOLD_BLANK = lexers.FOLD_BLANK or SC_FOLDLEVELWHITEFLAG
--- get_style_at = lexers.get_style_at or GetStyleAt
--- get_indent_amount = lexers.get_indent_amount or GetIndentAmount
--- get_property = lexers.get_property or GetProperty
--- get_fold_level = lexers.get_fold_level or GetFoldLevel
-
--- It needs checking: do we have access to all properties now? I'll clean
--- this up anyway as I want a simple clean and stable model.
-
--- This is somewhat messy. The lexer dll provides some virtual fields:
---
--- + property
--- + property_int
--- + style_at
--- + fold_level
--- + indent_amount
---
--- but for some reasons not:
---
--- + property_expanded
---
--- As a consequence we need to define it here because otherwise the
--- lexer will crash. The fuzzy thing is that we don't have to define
--- the property and property_int tables but we do have to define the
--- expanded beforehand. The folding properties are no longer interfaced
--- so the interface to scite is now rather weak (only a few hard coded
--- properties).
-
-local FOLD_BASE = 0
-local FOLD_HEADER = 0
-local FOLD_BLANK = 0
-
-local style_at = { }
-local indent_amount = { }
-local fold_level = { }
-
-local function check_main_properties()
- if not lexers.property then
- lexers.property = { }
- end
- if not lexers.property_int then
- lexers.property_int = setmetatable({ }, {
- __index = function(t,k)
- -- why the tostring .. it relies on lua casting to a number when
- -- doing a comparison
- return tonumber(lexers.property[k]) or 0 -- tostring removed
- end,
- -- __newindex = function(t,k,v)
- -- report("properties are read-only, '%s' is not changed",k)
- -- end,
- })
- end
-end
-
-lexers.property_expanded = setmetatable({ }, {
- __index = function(t,k)
- -- better be safe for future changes .. what if at some point this is
- -- made consistent in the dll ... we need to keep an eye on that
- local property = lexers.property
- if not property then
- check_main_properties()
- end
- --
--- return gsub(property[k],"[$%%]%b()", function(k)
--- return t[sub(k,3,-2)]
--- end)
- local v = property[k]
- if v then
- v = gsub(v,"[$%%]%b()", function(k)
- return t[sub(k,3,-2)]
- end)
- end
- return v
- end,
- __newindex = function(t,k,v)
- report("properties are read-only, '%s' is not changed",k)
- end,
-})
-
--- A downward compatible feature but obsolete:
-
--- local function get_property(tag,default)
--- return lexers.property_int[tag] or lexers.property[tag] or default
--- end
-
--- We still want our own properties (as it keeps changing so better play
--- safe from now on). At some point I can freeze them.
-
-local function check_properties(lexer)
- if lexer.properties then
- return lexer
- end
- check_main_properties()
- -- we use a proxy
- local mainproperties = lexers.property
- local properties = { }
- local expanded = setmetatable({ }, {
- __index = function(t,k)
- return gsub(properties[k] or mainproperties[k],"[$%%]%b()", function(k)
- return t[sub(k,3,-2)]
- end)
- end,
- })
- lexer.properties = setmetatable(properties, {
- __index = mainproperties,
- __call = function(t,k,default) -- expands
- local v = expanded[k]
- local t = type(default)
- if t == "number" then
- return tonumber(v) or default
- elseif t == "boolean" then
- return v == nil and default or v
- else
- return v or default
- end
- end,
- })
- return lexer
-end
-
--- do
--- lexers.property = { foo = 123, red = "R" }
--- local a = check_properties({}) print("a.foo",a.properties.foo)
--- a.properties.foo = "bar" print("a.foo",a.properties.foo)
--- a.properties.foo = "bar:$(red)" print("a.foo",a.properties.foo) print("a.foo",a.properties("foo"))
--- end
-
-local function set(value,default)
- if value == 0 or value == false or value == "0" then
- return false
- elseif value == 1 or value == true or value == "1" then
- return true
- else
- return default
- end
-end
-
-local function check_context_properties()
- local property = lexers.property -- let's hope that this stays
- log = set(property["lexer.context.log"], log)
- trace = set(property["lexer.context.trace"], trace)
- detail = set(property["lexer.context.detail"], detail)
- show = set(property["lexer.context.show"], show)
- collapse = set(property["lexer.context.collapse"],collapse)
- inspect = set(property["lexer.context.inspect"], inspect)
-end
-
-function context.registerproperties(p) -- global
- check_main_properties()
- local property = lexers.property -- let's hope that this stays
- for k, v in next, p do
- property[k] = v
- end
- check_context_properties()
-end
-
-context.properties = setmetatable({ }, {
- __index = lexers.property,
- __newindex = function(t,k,v)
- check_main_properties()
- lexers.property[k] = v
- check_context_properties()
- end,
-})
-
--- We want locals to we set them delayed. Once.
-
-local function initialize()
- FOLD_BASE = lexers.FOLD_BASE
- FOLD_HEADER = lexers.FOLD_HEADER
- FOLD_BLANK = lexers.FOLD_BLANK
- --
- style_at = lexers.style_at -- table
- indent_amount = lexers.indent_amount -- table
- fold_level = lexers.fold_level -- table
- --
- check_main_properties()
- --
- initialize = nil
-end
-
--- Style handler.
---
--- The property table will be set later (after loading) by the library. The
--- styleset is not needed any more as we predefine all styles as defaults
--- anyway (too bug sensitive otherwise).
-
-local function tocolors(colors)
- local colorset = { }
- local property_int = lexers.property_int or { }
- for k, v in next, colors do
- if type(v) == "table" then
- local r, g, b = v[1], v[2], v[3]
- if r and g and b then
- v = tonumber(format("%02X%02X%02X",b,g,r),16) or 0 -- hm
- elseif r then
- v = tonumber(format("%02X%02X%02X",r,r,r),16) or 0
- else
- v = 0
- end
- end
- colorset[k] = v
- property_int["color."..k] = v
- end
- return colorset
-end
-
-local function toproperty(specification)
- local serialized = { }
- for key, value in next, specification do
- if value == true then
- serialized[#serialized+1] = key
- elseif type(value) == "table" then
- local r, g, b = value[1], value[2], value[3]
- if r and g and b then
- value = format("#%02X%02X%02X",r,g,b) or "#000000"
- elseif r then
- value = format("#%02X%02X%02X",r,r,r) or "#000000"
- else
- value = "#000000"
- end
- serialized[#serialized+1] = key .. ":" .. value
- else
- serialized[#serialized+1] = key .. ":" .. tostring(value)
- end
- end
- return concat(serialized,",")
-end
-
-local function tostyles(styles)
- local styleset = { }
- local property = lexers.property or { }
- for k, v in next, styles do
- v = toproperty(v)
- styleset[k] = v
- property["style."..k] = v
- end
- return styleset
-end
-
-context.toproperty = toproperty
-context.tostyles = tostyles
-context.tocolors = tocolors
-
-- If we had one instance/state of Lua as well as all regular libraries
-- preloaded we could use the context base libraries. So, let's go poor-
-- mans solution now.

-- Register the styles: keep both the raw specification and the serialized
-- set around, and optionally dump the result when tracing in detail.
function context.registerstyles(styles)
    local styleset = tostyles(styles)
    context.styles   = styles
    context.styleset = styleset
    if detail then
        local keys, width = sortedkeys(styleset)
        local template = " %-" .. width .. "s : %s"
        report("initializing styleset:")
        for i=1,#keys do
            local key = keys[i]
            report(template,key,styleset[key])
        end
    elseif trace then
        report("initializing styleset")
    end
end
-
-- Register the colors (needed for textadept): keep both the raw and the
-- resolved set around, and optionally dump the result when tracing in
-- detail.
function context.registercolors(colors)
    local colorset = tocolors(colors)
    context.colors   = colors
    context.colorset = colorset
    if detail then
        local keys, width = sortedkeys(colorset)
        local template = " %-" .. width .. "s : %i"
        report("initializing colorset:")
        for i=1,#keys do
            local key = keys[i]
            report(template,key,colorset[key])
        end
    elseif trace then
        report("initializing colorset")
    end
end
-
-- Some spell checking related stuff. Unfortunately we cannot use a path set
-- by property. This will get a hook for resolvers.

local locations = {
    "context/lexers",      -- context lexers
    "context/lexers/data", -- context lexers
    "../lexers",           -- original lexers
    "../lexers/data",      -- original lexers
    ".",                   -- whatever
    "./data",              -- whatever
}

-- Locate and load a lua file by name. Inside a context run we can simply
-- require it; otherwise we walk the (semicolon separated) lexer path(s)
-- combined with the known locations and take the first file that loads.

local collect

if usage == "context" then

    collect = function(name)
        return require(name), name
    end

else

    collect = function(name)
        local rootlist = lexers.LEXERPATH or "."
        for root in gmatch(rootlist,"[^;]+") do
            local base = gsub(root,"/[^/]-lua$","")
            for i=1,#locations do
                -- so we can also check for .luc
                local fullname = base .. "/" .. locations[i] .. "/" .. name .. ".lua"
                if trace then
                    report("attempt to locate '%s'",fullname)
                end
                local okay, result = pcall(dofile,fullname)
                if okay then
                    return result, fullname
                end
            end
        end
        -- return require(name), name
    end

end
-
-- Load a lua file through the collector. Failure is reported unless we
-- run inside textadept (which has no console to report to).
function context.loadluafile(name)
    local data, fullname = collect(name)
    if not data then
        if not textadept then
            report("unable to load lua file '%s'",name)
        end
        return
    end
    if trace then
        report("lua file '%s' has been loaded",fullname)
    end
    return data, fullname
end
-
-- in fact we could share more as we probably process the data but then we need
-- to have a more advanced helper

local cache = { }

-- Load (and cache) a definition file. A table result is cached and reused;
-- a failed load is cached as false, but note that -- as in the original --
-- a cached false does not short circuit: we try to load again next time.
function context.loaddefinitions(name)
    local cached = cache[name]
    if cached then
        if trace then
            report("reusing definitions '%s'",name)
        end
        return cached
    elseif trace and cached == false then
        report("definitions '%s' were not found",name)
    end
    local data, fullname = collect(name)
    if not data then
        if not textadept then
            report("unable to load definition file '%s'",name)
        end
        data = false
    elseif trace then
        report("definition file '%s' has been loaded",fullname)
        if detail then
            local keys, width = sortedkeys(data)
            local template = " %-" .. width .. "s : %s"
            for i=1,#keys do
                local key   = keys[i]
                local value = data[key]
                if type(value) ~= "table" then
                    report(template,key,tostring(value))
                elseif #value > 0 then
                    report(template,key,#value)
                else
                    -- a hash: no need to show it
                end
            end
        end
    end
    cache[name] = data
    return type(data) == "table" and data
end
-
-- A bit of regression in textadept > 10 so updated ... done a bit different.
-- We don't use this in the context lexers anyway.

-- Build a matcher for a list of words (given as a table or as a string in
-- which "--" comments are stripped), optionally case insensitive, with
-- word_chars extending the accepted character set.
function context.word_match(words,word_chars,case_insensitive)
    if type(words) == "string" then
        -- these used to be proper tables, so split the string variant
        local cleaned = gsub(words,"%-%-[^\n]+","")
        local split = { }
        for word in gmatch(cleaned,"%S+") do
            split[#split+1] = word
        end
        words = split
    end
    local hash = { }
    for i=1,#words do
        hash[words[i]] = true
    end
    if case_insensitive then
        for i=1,#words do
            hash[lower(words[i])] = true
        end
    end
    local chars = S(word_chars or "")
    for i=1,#words do
        chars = chars + S(words[i])
    end
    local check
    if case_insensitive then
        check = function(input,index,word)
            -- we could speed up the mixed case path if needed
            return (hash[word] or hash[lower(word)]) and index or nil
        end
    else
        check = function(input,index,word)
            return hash[word] and index or nil
        end
    end
    return Cmt(chars^1,check)
end
-
-- Patterns are grouped in a separate namespace but the regular lexers expect
-- shortcuts to be present in the lexers library. Maybe I'll incorporate some
-- of l-lpeg later.

do

    local anything    = P(1)
    local idtoken     = R("az","AZ","\127\255","__")
    local digit       = R("09")
    local sign        = S("+-")
    local period      = P(".")
    local octdigit    = R("07")
    local hexdigit    = R("09","AF","af")
    local lower       = R("az")
    local upper       = R("AZ")
    local alpha       = upper + lower
    local space       = S(" \n\r\t\f\v")
    local eol         = S("\r\n")
    local backslash   = P("\\")
    local decimal     = digit^1
    local octal       = P("0")
                      * octdigit^1
    local hexadecimal = P("0") * S("xX")
                      * (hexdigit^0 * period * hexdigit^1 + hexdigit^1 * period * hexdigit^0 + hexdigit^1)
                      * (S("pP") * sign^-1 * hexdigit^1)^-1 -- *
    local integer     = sign^-1
                      * (hexadecimal + octal + decimal)
    local float       = sign^-1
                      * (digit^0 * period * digit^1 + digit^1 * period * digit^0 + digit^1)
                      * S("eE") * sign^-1 * digit^1 -- *

    -- Fix: these three only existed as patterns.* entries before, so the
    -- lexers.ascii, lexers.extend and lexers.cntrl assignments further down
    -- picked up nonexistent globals and ended up as nil.
    local ascii       = R("\000\127") -- useless
    local extend      = R("\000\255") -- useless
    local control     = R("\000\031")

    patterns.idtoken     = idtoken
    patterns.digit       = digit
    patterns.sign        = sign
    patterns.period      = period
    patterns.octdigit    = octdigit
    patterns.hexdigit    = hexdigit
    patterns.ascii       = ascii
    patterns.extend      = extend
    patterns.control     = control
    patterns.lower       = lower
    patterns.upper       = upper
    patterns.alpha       = alpha
    patterns.decimal     = decimal
    patterns.octal       = octal
    patterns.hexadecimal = hexadecimal
    patterns.float       = float
    patterns.cardinal    = decimal

    patterns.signeddecimal     = sign^-1 * decimal
    patterns.signedoctal       = sign^-1 * octal
    patterns.signedhexadecimal = sign^-1 * hexadecimal
    patterns.integer           = integer
    patterns.real              =
        sign^-1 * (                    -- at most one
            digit^1 * period * digit^0 -- 10.0 10.
          + digit^0 * period * digit^1 -- 0.10 .10
          + digit^1                    -- 10
        )

    patterns.anything   = anything
    patterns.any        = anything
    patterns.restofline = (1-eol)^1
    patterns.space      = space
    patterns.spacing    = space^1
    patterns.nospacing  = (1-space)^1
    patterns.eol        = eol
    patterns.newline    = P("\r\n") + eol
    patterns.backslash  = backslash

    local endof = S("\n\r\f")

    -- matches (without consuming) only at the start of a line
    patterns.startofline = P(function(input,index)
        return (index == 1 or lpegmatch(endof,input,index-1)) and index
    end)

    -- These are the expected ones for other lexers. Maybe all in own namespace
    -- and provide compatibility layer. or should I just remove them?

    lexers.any            = anything
    lexers.ascii          = ascii  -- was nil before the fix above
    lexers.extend         = extend -- was nil before the fix above
    lexers.alpha          = alpha
    lexers.digit          = digit
    lexers.alnum          = alpha + digit
    lexers.lower          = lower
    lexers.upper          = upper
    lexers.xdigit         = hexdigit
    lexers.cntrl          = control -- was nil before the fix above
    lexers.graph          = R("!~")
    lexers.print          = R(" ~")
    lexers.punct          = R("!/", ":@", "[\'", "{~")
    lexers.space          = space
    lexers.newline        = S("\r\n\f")^1
    lexers.nonnewline     = 1 - lexers.newline
    lexers.nonnewline_esc = 1 - (lexers.newline + '\\') + backslash * anything
    lexers.dec_num        = decimal
    lexers.oct_num        = octal
    lexers.hex_num        = hexadecimal
    lexers.integer        = integer
    lexers.float          = float
    lexers.word           = (alpha + "_") * (alpha + digit + "_")^0 -- weird, why digits

end
-
-- end of patterns

-- Build an lpeg that matches exactly the given words (table form, or a
-- hash when the array part is empty), optionally case insensitive, with
-- word_chars either extending the character set (string) or replacing the
-- token pattern (lpeg).
function context.exact_match(words,word_chars,case_insensitive)
    local characters = concat(words) -- the concat catches _ etc
    local pattern
    if word_chars == true or word_chars == false or word_chars == nil then
        word_chars = ""
    end
    if type(word_chars) == "string" then
        pattern = S(characters) + patterns.idtoken
        if case_insensitive then
            pattern = pattern + S(upper(characters)) + S(lower(characters))
        end
        if word_chars ~= "" then
            pattern = pattern + S(word_chars)
        end
    elseif word_chars then
        pattern = word_chars
    end
    local list = { }
    if case_insensitive then
        if #words == 0 then
            for k, v in next, words do
                list[lower(k)] = v
            end
        else
            for i=1,#words do
                list[lower(words[i])] = true
            end
        end
        return Cmt(pattern^1, function(_,i,s)
            return list[lower(s)] -- and i or nil
        end)
    else
        if #words == 0 then
            for k, v in next, words do
                list[k] = v
            end
        else
            for i=1,#words do
                list[words[i]] = true
            end
        end
        return Cmt(pattern^1, function(_,i,s)
            return list[s] -- and i or nil
        end)
    end
end
-
-- Build a pattern that matches any of the given literal words.
function context.just_match(words)
    local pattern = P(words[1])
    for i=2,#words do
        pattern = pattern + P(words[i])
    end
    return pattern
end
-
-- spell checking (we can only load lua files)
--
-- a list file looks like:
--
-- return {
--     min   = 3,
--     max   = 40,
--     n     = 12345,
--     words = {
--         ["someword"]    = "someword",
--         ["anotherword"] = "Anotherword",
--     },
-- }

local lists    = { }
local disabled = false

-- Globally disable spell checking.
function context.disablewordcheck()
    disabled = true
end
-
-- Load (and cache) the word list for the given tag. Returns the hash of
-- valid words (lowercase keys, original case values) plus the minimum word
-- length, or false plus the default minimum when there is no usable list.
function context.setwordlist(tag,limit)
    if not tag or tag == "" then
        return false, 3
    end
    local found = lists[tag]
    if not found then
        found = context.loaddefinitions("spell-" .. tag)
        if type(found) ~= "table" then
            if not textadept then
                report("invalid spell checking list for '%s'",tag)
            end
            found = { words = false, min = 3 }
        else
            found.words = found.words or false
            found.min   = found.min or 3
        end
        lists[tag] = found
    end
    if trace then
        report("enabling spell checking for '%s' with minimum '%s'",tag,found.min)
    end
    return found.words, found.min
end

patterns.wordtoken   = R("az","AZ","\127\255")
patterns.wordpattern = patterns.wordtoken^3 -- todo: if limit and #s < limit then
-
-- Classify word s against the hash of valid words (lowercase keys carry
-- the original casing as value). Returns true, a style name ("text",
-- "okay", "warning" or "error") and the index i.
function context.checkedword(validwords,validminimum,s,i) -- ,limit
    if not validwords then -- or #s < validminimum then
        return true, "text", i -- true, "default", i
    end
    local word = validwords[s]
    if word == s then
        return true, "okay", i    -- exact match
    elseif word then
        return true, "warning", i -- case issue
    end
    word = validwords[lower(s)]
    if word == s then
        return true, "okay", i    -- exact match
    elseif word then
        return true, "warning", i -- case issue
    elseif upper(s) == s then
        return true, "warning", i -- probably a logo or acronym
    else
        return true, "error", i
    end
end
-
-- Like checkedword but only returns the style name; words shorter than
-- validminimum are treated as plain text.
function context.styleofword(validwords,validminimum,s) -- ,limit
    if not validwords or #s < validminimum then
        return "text"
    end
    local word = validwords[s]
    if word == s then
        return "okay"    -- exact match
    elseif word then
        return "warning" -- case issue
    end
    word = validwords[lower(s)]
    if word == s then
        return "okay"    -- exact match
    elseif word then
        return "warning" -- case issue
    elseif upper(s) == s then
        return "warning" -- probably a logo or acronym
    else
        return "error"
    end
end
-
-- overloaded functions

-- Memoizing tables for fold level records; a leftover from the time small
-- tables were used (optimization).

local h_table, b_table, n_table = { }, { }, { }

setmetatable(h_table, { __index = function(t,level) local v = { level, FOLD_HEADER } t[level] = v return v end })
setmetatable(b_table, { __index = function(t,level) local v = { level, FOLD_BLANK } t[level] = v return v end })
setmetatable(n_table, { __index = function(t,level) local v = { level } t[level] = v return v end })

-- p_yes captures a nonempty line (position plus content), p_nop eats an
-- empty one.
local newline = patterns.newline
local p_yes   = Cp() * Cs((1-newline)^1) * newline^-1
local p_nop   = newline

local folders = { }
-
-- Snippets from the > 10 code .. but we do things different so ...

-- Fold by parsing: a folder function is built (and cached per lexer) that
-- scans the text line by line and accumulates fold levels from the lexer's
-- fold symbols (or, as a context extension, an lpeg fold pattern). The
-- grammar actions communicate through the shared upvalues declared below.

local function fold_by_parsing(text,start_pos,start_line,start_level,lexer)
    local folder = folders[lexer]
    if not folder then
        --
        -- state shared between the constructed pattern and the folder
        local pattern, folds, text, start_pos, line_num, prev_level, current_level
        --
        local fold_symbols = lexer._foldsymbols
        local fold_pattern = lexer._foldpattern -- use lpeg instead (context extension)
        --
        -- textadept >= 10
        --
        -- local zerosumlines = lexer.property_int["fold.on.zero.sum.lines"] > 0 -- not done
        -- local compact = lexer.property_int['fold.compact'] > 0 -- not done
        -- local lowercase = lexer._CASEINSENSITIVEFOLDPOINTS -- useless (utf will distort)
        --
        if fold_pattern then
            -- if no functions are found then we could have a faster one
            -- accumulate the level change for every matched fold symbol
            fold_pattern = Cp() * C(fold_pattern) / function(s,match)
                local symbols = fold_symbols[style_at[start_pos + s]]
                if symbols then
                    local l = symbols[match]
                    if l then
                        current_level = current_level + l
                    end
                end
            end
            -- a line with content: flush the level, mark fold headers
            local action_y = function()
                folds[line_num] = prev_level
                if current_level > prev_level then
                    folds[line_num] = prev_level + FOLD_HEADER
                end
                if current_level < FOLD_BASE then
                    current_level = FOLD_BASE
                end
                prev_level = current_level
                line_num = line_num + 1
            end
            -- an empty line: keep the level, mark as blank
            local action_n = function()
                folds[line_num] = prev_level + FOLD_BLANK
                line_num = line_num + 1
            end
            pattern = ((fold_pattern + (1-newline))^1 * newline / action_y + newline/action_n)^0

        else
            -- the traditional one but a bit optimized
            local fold_symbols_patterns = fold_symbols._patterns
            local action_y = function(pos,line)
                for j=1, #fold_symbols_patterns do
                    for s, match in gmatch(line,fold_symbols_patterns[j]) do -- "()(" .. patterns[i] .. ")"
                        local symbols = fold_symbols[style_at[start_pos + pos + s - 1]]
                        local l = symbols and symbols[match]
                        local t = type(l)
                        if t == "number" then
                            current_level = current_level + l
                        elseif t == "function" then
                            current_level = current_level + l(text, pos, line, s, match)
                        end
                    end
                end
                folds[line_num] = prev_level
                if current_level > prev_level then
                    folds[line_num] = prev_level + FOLD_HEADER
                end
                if current_level < FOLD_BASE then
                    current_level = FOLD_BASE
                end
                prev_level = current_level
                line_num = line_num + 1
            end
            local action_n = function()
                folds[line_num] = prev_level + FOLD_BLANK
                line_num = line_num + 1
            end
            pattern = (p_yes/action_y + p_nop/action_n)^0
        end
        --
        local reset_parser = lexer._reset_parser
        --
        -- the cached folder: (re)initialize the shared state and run the
        -- pattern over the text
        folder = function(_text_,_start_pos_,_start_line_,_start_level_)
            if reset_parser then
                reset_parser()
            end
            folds = { }
            text = _text_
            start_pos = _start_pos_
            line_num = _start_line_
            prev_level = _start_level_
            current_level = prev_level
            lpegmatch(pattern,text)
            -- make folds collectable
            local t = folds
            folds = nil
            return t
        end
        folders[lexer] = folder
    end
    return folder(text,start_pos,start_line,start_level,lexer)
end
-
-- Fold by indentation. The variables below are shared upvalues: the
-- pattern binds action_y/action_n once, so their state cannot live inside
-- the function itself.

local folds, current_line, prev_level

-- a line with content: derive the fold level from its indent amount
local function action_y()
    local current_level = FOLD_BASE + indent_amount[current_line]
    if current_level > prev_level then -- next level
        -- walk back over blank lines and promote the last real line to a
        -- fold header
        local i = current_line - 1
        local f
        while true do
            f = folds[i]
            if not f then
                break
            elseif f[2] == FOLD_BLANK then
                i = i - 1
            else
                f[2] = FOLD_HEADER -- low indent
                break
            end
        end
        folds[current_line] = { current_level } -- high indent
    elseif current_level < prev_level then -- prev level
        local f = folds[current_line - 1]
        if f then
            f[1] = prev_level -- high indent
        end
        folds[current_line] = { current_level } -- low indent
    else -- same level
        folds[current_line] = { prev_level }
    end
    prev_level = current_level
    current_line = current_line + 1
end

-- an empty line: keep the previous level, mark as blank
local function action_n()
    folds[current_line] = { prev_level, FOLD_BLANK }
    current_line = current_line + 1
end

local pattern = ( S("\t ")^0 * ( (1-patterns.eol)^1 / action_y + P(true) / action_n) * newline )^0

local function fold_by_indentation(text,start_pos,start_line,start_level)
    -- initialize
    folds = { }
    current_line = start_line
    prev_level = start_level
    -- define
    -- -- not here .. pattern binds and local functions are not frozen
    -- analyze
    lpegmatch(pattern,text)
    -- flatten: combine the level with the optional blank flag
    for line, level in next, folds do
        folds[line] = level[1] + (level[2] or 0)
    end
    -- done, make folds collectable
    local t = folds
    folds = nil
    return t
end
-
-- Fold by line: every line simply keeps the start level.
local function fold_by_line(text,start_pos,start_line,start_level)
    local folds = { }
    -- could be done with lpeg as well
    for _ in gmatch(text,".-\r?\n") do
        folds[start_line] = n_table[start_level] -- { start_level } -- style tables ? needs checking
        start_line = start_line + 1
    end
    return folds
end
-
-- Size thresholds; the filesize is not known here so these are currently
-- not effective.
local threshold_by_lexer       = 512 * 1024
local threshold_by_parsing     = 512 * 1024
local threshold_by_indentation = 512 * 1024
local threshold_by_line        = 512 * 1024

-- Dispatch to the fold method the lexer asks for (its own folder, symbol
-- parsing, indentation or per line).
function context.fold(lexer,text,start_pos,start_line,start_level)
    if text == "" then
        return { }
    end
    if initialize then
        initialize()
    end
    local fold_by_lexer   = lexer._fold
    local fold_by_symbols = lexer._foldsymbols
    local filesize        = 0 -- we don't know that
    if fold_by_lexer then
        if filesize <= threshold_by_lexer then
            return fold_by_lexer(text,start_pos,start_line,start_level,lexer)
        end
    elseif fold_by_symbols then -- and lexer.properties("fold.by.parsing",1) > 0 then
        if filesize <= threshold_by_parsing then
            return fold_by_parsing(text,start_pos,start_line,start_level,lexer)
        end
    elseif lexer._FOLDBYINDENTATION or lexer.properties("fold.by.indentation",1) > 0 then
        if filesize <= threshold_by_indentation then
            return fold_by_indentation(text,start_pos,start_line,start_level)
        end
    elseif lexer._FOLDBYLINE or lexer.properties("fold.by.line",1) > 0 then
        if filesize <= threshold_by_line then
            return fold_by_line(text,start_pos,start_line,start_level)
        end
    end
    return { }
end
-
-- The following code is mostly unchanged:

-- Register a rule under id and remember the registration order.
local function add_rule(lexer,id,rule)
    local rules = lexer._RULES
    if not rules then
        rules = { }
        lexer._RULES = rules
        lexer._RULEORDER = { }
    end
    rules[id] = rule
    local order = lexer._RULEORDER
    order[#order+1] = id
end
-
-- Replace a rule; on a child lexer the parent's rule table is targeted
-- (needed for textadept > 10).
local function modify_rule(lexer,id,rule)
    local target = lexer._lexer or lexer
    target._RULES[id] = rule
end
-
-- Fetch a rule; on a child lexer the parent's rule table is consulted
-- (needed for textadept > 10).
local function get_rule(lexer,id)
    local target = lexer._lexer or lexer
    return target._RULES[id]
end
-
-- I finally figured out that adding more styles was an issue because of several
-- reasons:
--
-- + in old versions there was a limit in the amount, so we overran the built-in
--   hard coded scintilla range
-- + then, the add_style function didn't check for already known ones, so again
--   we had an overrun (with some magic that could be avoided)
-- + then, when I messed with a new default set I realized that there is no check
--   in initializing _TOKENSTYLES (here the inspect function helps)
-- + of course it was mostly a side effect of passing all the used styles to the
--   _tokenstyles instead of only the not-default ones but such a thing should not
--   matter (read: intercepted)
--
-- This finally removed a head-ache and was revealed by lots of tracing, which I
-- should have built in way earlier.

local function add_style(lexer,token_name,style) -- changed a bit around 3.41
    -- Styles already present as defaults or predefined ones are not added
    -- again (in old scintilla versions that could overflow the possible
    -- amount) ... except in textadept, where styles are stored per buffer
    -- and we just go on.
    if defaultstyles[token_name] then
        if trace and detail then
            report("default style '%s' is ignored as extra style",token_name)
        end
        if not textadept then
            return
        end
    elseif predefinedstyles[token_name] then
        if trace and detail then
            report("predefined style '%s' is ignored as extra style",token_name)
        end
        if not textadept then
            return
        end
    elseif trace and detail then
        report("adding extra style '%s' as '%s'",token_name,style)
    end
    -- This is unchanged. We skip the dangerous zone.
    local num_styles = lexer._numstyles
    if num_styles == 32 then
        num_styles = num_styles + 8
    end
    if num_styles >= 255 then
        report("there can't be more than %s styles",255)
    end
    lexer._TOKENSTYLES[token_name] = num_styles
    lexer._EXTRASTYLES[token_name] = style
    lexer._numstyles = num_styles + 1
    -- hm, the original (now) also copies to the parent ._lexer
end
-
-- Initialize the token style mapping from the default and predefined style
-- lists. We also check for the dangerous zone (styles 32..39) here, so we
-- can have a larger default set than the original code, which just assumed
-- #default stays below that zone.
local function check_styles(lexer)
    local numstyles   = 0
    local tokenstyles = { }
    for i=1,#default do
        if numstyles == 32 then
            numstyles = numstyles + 8
        end
        tokenstyles[default[i]] = numstyles
        numstyles = numstyles + 1
    end
    -- unchanged: predefined styles occupy the fixed slots
    for i=1,#predefined do
        tokenstyles[predefined[i]] = i + 31
    end
    lexer._TOKENSTYLES = tokenstyles
    lexer._numstyles   = numstyles
    lexer._EXTRASTYLES = { }
    return lexer
end
-
-- At some point an 'any' append showed up in the original code ... but I
-- see no need to catch that case ... better fix the specification.
--
-- hm, why are many joined twice

-- Combine all registered rules into one token rule, in registration order
-- (slightly different from the original: no 'any' append for context
-- lexers). Note: the rule table was locally called 'patterns' before,
-- shadowing the file level patterns table; renamed here for clarity.
local function join_tokens(lexer)
    local rules = lexer._RULES
    local order = lexer._RULEORDER
    -- report("lexer: %s, tokens: %s",lexer._NAME,table.concat(order," + "))
    if rules and order then
        local token_rule = rules[order[1]] -- normally whitespace
        for i=2,#order do
            token_rule = token_rule + rules[order[i]]
        end
        if lexer._TYPE ~= "context" then
            token_rule = token_rule + lexers.token(lexers.DEFAULT, rules.any)
        end
        lexer._TOKENRULE = token_rule
        return token_rule
    else
        return P(1)
    end
end
-
-- hm, maybe instead of a grammar just a flat one

-- Add a lexer and its embedded children to the grammar table (mostly the
-- same as the original).
local function add_lexer(grammar, lexer)
    local token_rule = join_tokens(lexer)
    local lexer_name = lexer._NAME
    local children   = lexer._CHILDREN
    for i=1,#children do
        local child = children[i]
        if child._CHILDREN then
            add_lexer(grammar, child)
        end
        local child_name = child._NAME
        local rules      = child._EMBEDDEDRULES[lexer_name]
        local child_rule = grammar["__" .. child_name] or rules.token_rule
        -- run the child's tokens until its end rule shows up
        local pattern    = (-rules.end_rule * child_rule)^0 * rules.end_rule^-1
        grammar[child_name] = pattern * V(lexer_name)
        local embedded_child = "_" .. child_name
        grammar[embedded_child] = rules.start_rule * pattern
        token_rule = V(embedded_child) + token_rule
    end
    if trace then
        report("adding lexer '%s' with %s children",lexer_name,#children)
    end
    grammar["__" .. lexer_name] = token_rule
    grammar[lexer_name] = token_rule^0
end
-
-- Build (and store in _GRAMMAR) the grammar for a lexer, optionally with
-- an explicit initial rule; same as the original.
local function build_grammar(lexer,initial_rule)
    local children   = lexer._CHILDREN
    local lexer_name = lexer._NAME
    local preamble   = lexer._preamble
    local grammar    = lexer._grammar
    -- if grammar then
    --     -- experiment
    -- elseif children then
    if children then
        initial_rule = initial_rule or lexer_name
        grammar = { initial_rule }
        add_lexer(grammar, lexer)
        lexer._INITIALRULE = initial_rule
        grammar = Ct(P(grammar))
        if trace then
            report("building grammar for '%s' with whitespace '%s'and %s children",lexer_name,lexer.whitespace or "?",#children)
        end
    else
        grammar = Ct(join_tokens(lexer)^0)
        if trace then
            report("building grammar for '%s' with whitespace '%s'",lexer_name,lexer.whitespace or "?")
        end
    end
    if preamble then
        grammar = preamble^-1 * grammar
    end
    lexer._GRAMMAR = grammar
end
-
-- So far. We need these local functions in the next one.

local lineparsers = { }

local maxmatched = 100

-- Collapse adjacent ranges with the same token in the flat
-- { token, position, token, position, ... } result list, in place.
--
-- Fix: an empty list left lastindex nil and the trimming loop then did
-- arithmetic on nil; we now only trim when something was kept.
local function collapsed(t)
    local lasttoken = nil
    local lastindex = nil
    for i=1,#t,2 do
        local token    = t[i]
        local position = t[i+1]
        if token == lasttoken then
            -- same token as before: just extend the previous range
            t[lastindex] = position
        elseif lastindex then
            lastindex = lastindex + 1
            t[lastindex] = token
            lastindex = lastindex + 1
            t[lastindex] = position
            lasttoken = token
        else
            lastindex = i+1
            lasttoken = token
        end
    end
    if lastindex then
        for i=#t,lastindex+1,-1 do
            t[i] = nil
        end
    end
    return t
end
-
-- Run the grammar over the text; optionally dump the resulting ranges
-- (when tracing with show) and collapse adjacent equal tokens.
local function matched(lexer,grammar,text)
    -- text = string.gsub(text,"\z","!")
    local t = lpegmatch(grammar,text)
    if trace then
        if show then
            report("output of lexer: %s (max %s entries)",lexer._NAME,maxmatched)
            local styles = lexer._TOKENSTYLES
            local p = 1
            for i=1,2*maxmatched,2 do
                local n  = i + 1
                local ti = t[i]
                local tn = t[n]
                if not ti then
                    break
                end
                local txt = sub(text,p,tn-1)
                if txt then
                    txt = gsub(txt,"[%s]"," ")
                else
                    txt = "!no text!"
                end
                report("%4i : %s > %s (%s) (%s)",floor(n/2),ti,tn,styles[ti] or "!unset!",txt)
                p = tn
            end
        end
        report("lexer results: %s, length: %s, ranges: %s",lexer._NAME,#text,floor(#t/2))
        if collapse then
            t = collapsed(t)
            report("lexer collapsed: %s, length: %s, ranges: %s",lexer._NAME,#text,floor(#t/2))
        end
    elseif collapse then
        t = collapsed(t)
    end
    return t
end
-
-- Todo: make nice generic lexer (extra argument with start/stop commands) for
-- context itself.
--
-- In textadept >= 10 grammar building seem to have changed a bit. So, in retrospect
-- I could better have just dropped compatibility and stick to ctx lexers only.

-- The main lexing entry point: returns the flat { token, position, ... }
-- list for the given text. Three cases: line by line lexing, lexing with
-- child lexers (where the grammar to use depends on the initial style),
-- and the plain whole-text case.

function context.lex(lexer,text,init_style)
    -- local lexer = global._LEXER
    local grammar = lexer._GRAMMAR
    if initialize then
        initialize()
    end
    if not grammar then
        return { }
    elseif lexer._LEXBYLINE then -- we could keep token
        local tokens = { }
        local offset = 0
        local noftokens = 0
        local lineparser = lineparsers[lexer]
        if not lineparser then -- probably a cmt is more efficient
            -- lex each line separately and shift positions by the running
            -- offset into the whole text
            lineparser = C((1-newline)^0 * newline) / function(line)
                local length = #line
                local line_tokens = length > 0 and lpegmatch(grammar,line)
                if line_tokens then
                    for i=1,#line_tokens,2 do
                        noftokens = noftokens + 1
                        tokens[noftokens] = line_tokens[i]
                        noftokens = noftokens + 1
                        tokens[noftokens] = line_tokens[i + 1] + offset
                    end
                end
                offset = offset + length
                -- make sure the whole line is covered
                if noftokens > 0 and tokens[noftokens] ~= offset then
                    noftokens = noftokens + 1
                    tokens[noftokens] = "default"
                    noftokens = noftokens + 1
                    tokens[noftokens] = offset + 1
                end
            end
            lineparser = lineparser^0
            lineparsers[lexer] = lineparser
        end
        lpegmatch(lineparser,text)
        return tokens
    elseif lexer._CHILDREN then
        -- the grammar to use depends on the style we start in, so keep a
        -- per style cache of grammars
        local hash = lexer._HASH -- hm, was _hash
        if not hash then
            hash = { }
            lexer._HASH = hash
        end
        grammar = hash[init_style]
        if grammar then
            lexer._GRAMMAR = grammar
            -- lexer._GRAMMAR = lexer._GRAMMAR or grammar
        else
            for style, style_num in next, lexer._TOKENSTYLES do
                if style_num == init_style then
                    -- the name of the lexers is filtered from the whitespace
                    -- specification .. weird code, should be a reverse hash
                    local lexer_name = match(style,"^(.+)_whitespace") or lexer._NAME
                    if lexer._INITIALRULE ~= lexer_name then
                        grammar = hash[lexer_name]
                        if not grammar then
                            build_grammar(lexer,lexer_name)
                            grammar = lexer._GRAMMAR
                            hash[lexer_name] = grammar
                        end
                    end
                    break
                end
            end
            grammar = grammar or lexer._GRAMMAR
            hash[init_style] = grammar
        end
        if trace then
            report("lexing '%s' with initial style '%s' and %s children", lexer._NAME,init_style,#lexer._CHILDREN or 0)
        end
        return matched(lexer,grammar,text)
    else
        if trace then
            report("lexing '%s' with initial style '%s'",lexer._NAME,init_style)
        end
        return matched(lexer,grammar,text)
    end
end
-
-- hm, changed in 3.24 .. no longer small table but one table (so we could
-- remove our aggressive optimization which worked quite well)

-- Produce a token: the pattern followed by its name and end position.
function context.token(name, patt)
    return patt * Cc(name) * Cp()
end
-
-- The next ones were mostly unchanged (till now); we moved them here when
-- 3.41 became close to impossible to combine with cq. overload and a merge
-- was the only solution. It makes later updates more painful but the update
-- to 3.41 was already a bit of a nightmare anyway.
--
-- Loading lexers is rather interwoven with what the dll/so sets and that
-- changes over time, so we need to keep an eye on changes. One problem we
-- always faced were the limitations in length of lexer names (they get
-- app/prepended occasionally to strings with a hard coded limit), so we
-- always used alternative names and need to make sure these don't clash.
-- In the end one of the 3.41 problems was in the _NAME setting: we set
-- _NAME to e.g. 'tex' but load from a file with a longer name (so as not
-- to clash with existing files), and then lexers were not being found.

local whitespaces = { }

-- Push the current whitespace style name and switch to a lexer specific one.
local function push_whitespace(name)
    whitespaces[#whitespaces+1] = lexers.WHITESPACE or "whitespace"
    lexers.WHITESPACE = name .. "_whitespace"
end

-- Restore the previously pushed whitespace style name.
local function pop_whitespace()
    local n = #whitespaces
    local previous = whitespaces[n]
    whitespaces[n] = nil
    lexers.WHITESPACE = previous or "whitespace"
end

-- Make sure a lexer carries its private whitespace style name.
local function check_whitespace(lexer,name)
    if not lexer then
        return
    end
    lexer.whitespace = (name or lexer.name or lexer._NAME) .. "_whitespace"
end
-
-- Create a fresh context lexer table and initialize its whitespace,
-- styles and properties.
function context.new(name,filename)
    local lexer = {
        _TYPE     = "context",
        --
        _NAME     = name,     -- used for token building
        _FILENAME = filename, -- for diagnostic purposes
        --
        name      = name,
        filename  = filename,
    }
    if trace then
        report("initializing lexer tagged '%s' from file '%s'",name,filename or name)
    end
    check_whitespace(lexer)
    check_styles(lexer)
    check_properties(lexer)
    lexer._tokenstyles = context.styleset
    return lexer
end
-
-- Fallback: a minimal dummy lexer for files that do not deliver one.
local function nolexer(name)
    local fallback = {
        _TYPE = "unset",
        _NAME = name,
        -- _rules = { },
    }
    check_styles(fallback)
    check_whitespace(fallback)
    check_properties(fallback)
    return fallback
end
-
-- Load a lexer file and sanitize the result: native (non-context) lexers
-- get their styles/whitespace/properties checked, invalid files yield a
-- dummy lexer, and the namespace wins as _NAME when it differs.
local function load_lexer(name,namespace)
    if trace then
        report("loading lexer file '%s'",name)
    end
    push_whitespace(namespace or name) -- for traditional lexers .. no alt_name yet
    local lexer, fullname = context.loadluafile(name)
    pop_whitespace()
    if not lexer then
        report("invalid lexer file '%s'",name)
    elseif trace then
        report("lexer file '%s' has been loaded",fullname)
    end
    if type(lexer) ~= "table" then
        if trace then
            report("lexer file '%s' gets a dummy lexer",name)
        end
        return nolexer(name)
    end
    if lexer._TYPE ~= "context" then
        lexer._TYPE = "native"
        check_styles(lexer)
        check_whitespace(lexer,namespace or name)
        check_properties(lexer)
    end
    if not lexer._NAME then
        lexer._NAME = name -- so: filename
    end
    if name ~= namespace then
        lexer._NAME = namespace
    end
    return lexer
end
-
-- tracing ...

-- Dump everything we know about a lexer, recursively for its children.
-- If we had the regular libs available we could use the usual helpers.
local function inspect_lexer(lexer,level)
    local parent = lexer._lexer
    lexer._lexer = nil -- prevent endless recursion
    local name = lexer._NAME
    local function bynumber(tag,styles)
        -- show styles sorted by style number
        local numbers = { }
        for style, number in next, styles do
            numbers[number] = style
        end
        local keys = sortedkeys(numbers)
        for i=1,#keys do
            local k = keys[i]
            report("[%s %s] %s %s = %s",level,name,tag,k,numbers[k])
        end
    end
    local function byname(tag,styles)
        -- show styles sorted by name
        local keys = sortedkeys(styles)
        for i=1,#keys do
            local k = keys[i]
            report("[%s %s] %s %s = %s",level,name,tag,k,styles[k])
        end
    end
    local keys = sortedkeys(lexer)
    for i=1,#keys do
        local k = keys[i]
        report("[%s %s] root key : %s = %s",level,name,k,tostring(lexer[k]))
    end
    bynumber("token style",lexer._TOKENSTYLES)
    byname("extra style",lexer._EXTRASTYLES)
    local children = lexer._CHILDREN
    if children then
        for i=1,#children do
            inspect_lexer(children[i],level+1)
        end
    end
    lexer._lexer = parent
end

function context.inspect(lexer)
    inspect_lexer(lexer,0)
end
-
--- An optional second argument has been introduced so that one can embed a lexer
--- more than once ... maybe something to look into (as not it's done by remembering
--- the start sequence ... quite okay but maybe suboptimal ... anyway, never change
--- a working solution).
-
--- namespace can be automatic: if parent then use name of parent (chain)
-
--- The original lexer framework had a rather messy user uinterface (e.g. moving
--- stuff from _rules to _RULES at some point but I could live with that. Now it uses
--- add_ helpers. But the subsystem is still not clean and pretty. Now, I can move to
--- the add_ but there is no gain in it so we support a mix which gives somewhat ugly
--- code. In fact, there should be proper subtables for this. I might actually do
--- this because we now always overload the normal lexer (parallel usage seems no
--- longer possible). For SciTE we can actually do a conceptual upgrade (more the
--- context way) because there is no further development there. That way we could
--- make even more advanced lexers.
-
-local savedrequire = require
-
-local escapes = {
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%(", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-function context.loadlexer(filename,namespace)
-
- if textadept then
- require = function(name)
- return savedrequire(name == "lexer" and "scite-context-lexer" or name)
- end
- end
-
- nesting = nesting + 1
- if not namespace then
- namespace = filename
- end
- local lexer = usedlexers[namespace] -- we load by filename but the internal name can be short
- if lexer then
- if trace then
- report("reusing lexer '%s'",namespace)
- end
- nesting = nesting - 1
- return lexer
- elseif trace then
- report("loading lexer '%s'",namespace)
- end
- --
- if initialize then
- initialize()
- end
- --
- parent_lexer = nil
- --
- lexer = load_lexer(filename,namespace) or nolexer(filename,namespace)
- usedlexers[filename] = lexer
- --
- if not lexer._rules and not lexer._lexer and not lexer_grammar then -- hmm should be lexer._grammar
- lexer._lexer = parent_lexer
- end
- --
- if lexer._lexer then
- local _l = lexer._lexer
- local _r = lexer._rules
- local _s = lexer._tokenstyles
- if not _l._tokenstyles then
- _l._tokenstyles = { }
- end
- if _r then
- local rules = _l._rules
- local name = lexer.name
- for i=1,#_r do
- local rule = _r[i]
- rules[#rules + 1] = {
- name .. "_" .. rule[1],
- rule[2],
- }
- end
- end
- if _s then
- local tokenstyles = _l._tokenstyles
- for token, style in next, _s do
- tokenstyles[token] = style
- end
- end
- lexer = _l
- end
- --
- local _r = lexer._rules
- local _g = lexer._grammar
- -- if _r or _g then
- if _r then
- local _s = lexer._tokenstyles
- if _s then
- for token, style in next, _s do
- add_style(lexer, token, style)
- end
- end
- if _r then
- for i=1,#_r do
- local rule = _r[i]
- add_rule(lexer, rule[1], rule[2])
- end
- end
- build_grammar(lexer)
- else
- -- other lexers
- build_grammar(lexer)
- end
- --
- add_style(lexer, lexer.whitespace, lexers.STYLE_WHITESPACE)
- --
- local foldsymbols = lexer._foldsymbols
- if foldsymbols then
- local patterns = foldsymbols._patterns
- if patterns then
- for i = 1, #patterns do
- patterns[i] = "()(" .. gsub(patterns[i],".",escapes) .. ")"
- end
- end
- end
- --
- lexer.lex = lexers.lex
- lexer.fold = lexers.fold
- --
- nesting = nesting - 1
- --
- if inspect then
- context.inspect(lexer)
- end
- --
- if textadept then
- require = savedrequire
- end
- --
- return lexer
-end
-
--- I probably need to check this occasionally with the original as I've messed around a bit
--- in the past to get nesting working well as one can hit the max number of styles, get
--- clashes due to fuzzy inheritance etc. so there is some interplay with the other patched
--- code.
-
-function context.embed_lexer(parent, child, start_rule, end_rule) -- mostly the same as the original
- local embeddedrules = child._EMBEDDEDRULES
- if not embeddedrules then
- embeddedrules = { }
- child._EMBEDDEDRULES = embeddedrules
- end
- if not child._RULES then
- local rules = child._rules
- if not rules then
- report("child lexer '%s' has no rules",child._NAME or "unknown")
- rules = { }
- child._rules = rules
- end
- for i=1,#rules do
- local rule = rules[i]
- add_rule(child, rule[1], rule[2])
- end
- end
- embeddedrules[parent._NAME] = {
- ["start_rule"] = start_rule,
- ["token_rule"] = join_tokens(child),
- ["end_rule"] = end_rule
- }
- local children = parent._CHILDREN
- if not children then
- children = { }
- parent._CHILDREN = children
- end
- children[#children + 1] = child
- local tokenstyles = parent._tokenstyles
- if not tokenstyles then
- tokenstyles = { }
- parent._tokenstyles = tokenstyles
- end
- local childname = child._NAME
- local whitespace = childname .. "_whitespace"
- tokenstyles[whitespace] = lexers.STYLE_WHITESPACE -- all these STYLE_THINGS will go .. just a proper hash
- if trace then
- report("using whitespace '%s' as trigger for '%s' with property '%s'",whitespace,childname,lexers.STYLE_WHITESPACE)
- end
- local childstyles = child._tokenstyles
- if childstyles then
- for token, style in next, childstyles do
- tokenstyles[token] = style
- end
- end
- -- new, a bit redone, untested, no clue yet what it is for
- local parentsymbols = parent._foldsymbols
- local childsymbols = child ._foldsymbols
- if not parentsymbols then
- parentsymbols = { }
- parent._foldsymbols = parentsymbols
- end
- if childsymbols then
- for token, symbols in next, childsymbols do
- local tokensymbols = parentsymbols[token]
- if not tokensymbols then
- tokensymbols = { }
- parentsymbols[token] = tokensymbols
- end
- for k, v in next, symbols do
- if type(k) == 'number' then
- tokensymbols[#tokensymbols + 1] = v
- elseif not tokensymbols[k] then
- tokensymbols[k] = v
- end
- end
- end
- end
- --
- child._lexer = parent
- parent_lexer = parent
-end
-
--- we now move the adapted code to the lexers namespace
-
-lexers.new = context.new
-lexers.load = context.loadlexer
-------.loadlexer = context.loadlexer
-lexers.loadluafile = context.loadluafile
-lexers.embed_lexer = context.embed_lexer
-lexers.fold = context.fold
-lexers.lex = context.lex
-lexers.token = context.token
-lexers.word_match = context.word_match
-lexers.exact_match = context.exact_match
-lexers.just_match = context.just_match
-lexers.inspect = context.inspect
-lexers.report = context.report
-lexers.inform = context.inform
-
--- helper .. alas ... in scite the lexer's lua instance is rather crippled .. not
--- even math is part of it
-
-do
-
- local floor = math and math.floor
- local char = string.char
- local format = format
- local tonumber = tonumber
-
- local function utfchar(n)
- if n < 0x80 then
- return char(n)
- elseif n < 0x800 then
- return char(
- 0xC0 + floor(n/0x40),
- 0x80 + (n % 0x40)
- )
- elseif n < 0x10000 then
- return char(
- 0xE0 + floor(n/0x1000),
- 0x80 + (floor(n/0x40) % 0x40),
- 0x80 + (n % 0x40)
- )
- elseif n < 0x40000 then
- return char(
- 0xF0 + floor(n/0x40000),
- 0x80 + floor(n/0x1000),
- 0x80 + (floor(n/0x40) % 0x40),
- 0x80 + (n % 0x40)
- )
- else
- -- return char(
- -- 0xF1 + floor(n/0x1000000),
- -- 0x80 + floor(n/0x40000),
- -- 0x80 + floor(n/0x1000),
- -- 0x80 + (floor(n/0x40) % 0x40),
- -- 0x80 + (n % 0x40)
- -- )
- return "?"
- end
- end
-
- context.utfchar = utfchar
-
- -- -- the next one is good enough for use here but not perfect (see context for a
- -- -- better one)
- --
- -- local function make(t)
- -- local p
- -- for k, v in next, t do
- -- if not p then
- -- if next(v) then
- -- p = P(k) * make(v)
- -- else
- -- p = P(k)
- -- end
- -- else
- -- if next(v) then
- -- p = p + P(k) * make(v)
- -- else
- -- p = p + P(k)
- -- end
- -- end
- -- end
- -- return p
- -- end
- --
- -- function lpeg.utfchartabletopattern(list)
- -- local tree = { }
- -- for i=1,#list do
- -- local t = tree
- -- for c in gmatch(list[i],".") do
- -- if not t[c] then
- -- t[c] = { }
- -- end
- -- t = t[c]
- -- end
- -- end
- -- return make(tree)
- -- end
-
- local utf8next = R("\128\191")
- local utf8one = R("\000\127")
- local utf8two = R("\194\223") * utf8next
- local utf8three = R("\224\239") * utf8next * utf8next
- local utf8four = R("\240\244") * utf8next * utf8next * utf8next
-
- local utfidentifier = utf8two + utf8three + utf8four
- helpers.utfidentifier = (R("AZ","az","__") + utfidentifier)
- * (R("AZ","az","__","09") + utfidentifier)^0
-
- helpers.utfcharpattern = P(1) * utf8next^0 -- unchecked but fast
- helpers.utfbytepattern = utf8one / byte
- + utf8two / function(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
- + utf8three / function(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
- + utf8four / function(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
- local p_false = P(false)
- local p_true = P(true)
-
- local function make(t)
- local function making(t)
- local p = p_false
- local keys = sortedkeys(t)
- for i=1,#keys do
- local k = keys[i]
- if k ~= "" then
- local v = t[k]
- if v == true then
- p = p + P(k) * p_true
- elseif v == false then
- -- can't happen
- else
- p = p + P(k) * making(v)
- end
- end
- end
- if t[""] then
- p = p + p_true
- end
- return p
- end
- local p = p_false
- local keys = sortedkeys(t)
- for i=1,#keys do
- local k = keys[i]
- if k ~= "" then
- local v = t[k]
- if v == true then
- p = p + P(k) * p_true
- elseif v == false then
- -- can't happen
- else
- p = p + P(k) * making(v)
- end
- end
- end
- return p
- end
-
- local function collapse(t,x)
- if type(t) ~= "table" then
- return t, x
- else
- local n = next(t)
- if n == nil then
- return t, x
- elseif next(t,n) == nil then
- -- one entry
- local k = n
- local v = t[k]
- if type(v) == "table" then
- return collapse(v,x..k)
- else
- return v, x .. k
- end
- else
- local tt = { }
- for k, v in next, t do
- local vv, kk = collapse(v,k)
- tt[kk] = vv
- end
- return tt, x
- end
- end
- end
-
- function helpers.utfchartabletopattern(list)
- local tree = { }
- local n = #list
- if n == 0 then
- for s in next, list do
- local t = tree
- local p, pk
- for c in gmatch(s,".") do
- if t == true then
- t = { [c] = true, [""] = true }
- p[pk] = t
- p = t
- t = false
- elseif t == false then
- t = { [c] = false }
- p[pk] = t
- p = t
- t = false
- else
- local tc = t[c]
- if not tc then
- tc = false
- t[c] = false
- end
- p = t
- t = tc
- end
- pk = c
- end
- if t == false then
- p[pk] = true
- elseif t == true then
- -- okay
- else
- t[""] = true
- end
- end
- else
- for i=1,n do
- local s = list[i]
- local t = tree
- local p, pk
- for c in gmatch(s,".") do
- if t == true then
- t = { [c] = true, [""] = true }
- p[pk] = t
- p = t
- t = false
- elseif t == false then
- t = { [c] = false }
- p[pk] = t
- p = t
- t = false
- else
- local tc = t[c]
- if not tc then
- tc = false
- t[c] = false
- end
- p = t
- t = tc
- end
- pk = c
- end
- if t == false then
- p[pk] = true
- elseif t == true then
- -- okay
- else
- t[""] = true
- end
- end
- end
- collapse(tree,"")
- -- inspect(tree)
- return make(tree)
- end
-
- patterns.invisibles = helpers.utfchartabletopattern {
- utfchar(0x00A0), -- nbsp
- utfchar(0x2000), -- enquad
- utfchar(0x2001), -- emquad
- utfchar(0x2002), -- enspace
- utfchar(0x2003), -- emspace
- utfchar(0x2004), -- threeperemspace
- utfchar(0x2005), -- fourperemspace
- utfchar(0x2006), -- sixperemspace
- utfchar(0x2007), -- figurespace
- utfchar(0x2008), -- punctuationspace
- utfchar(0x2009), -- breakablethinspace
- utfchar(0x200A), -- hairspace
- utfchar(0x200B), -- zerowidthspace
- utfchar(0x202F), -- narrownobreakspace
- utfchar(0x205F), -- math thinspace
- }
-
- -- now we can make:
-
- patterns.iwordtoken = patterns.wordtoken - patterns.invisibles
- patterns.iwordpattern = patterns.iwordtoken^3
-
-end
-
--- The following helpers are not used, partially replaced by other mechanisms and
--- when needed I'll first optimize them. I only made them somewhat more readable.
-
-function lexers.delimited_range(chars, single_line, no_escape, balanced) -- unchanged
- local s = sub(chars,1,1)
- local e = #chars == 2 and sub(chars,2,2) or s
- local range
- local b = balanced and s or ""
- local n = single_line and "\n" or ""
- if no_escape then
- local invalid = S(e .. n .. b)
- range = patterns.any - invalid
- else
- local invalid = S(e .. n .. b) + patterns.backslash
- range = patterns.any - invalid + patterns.backslash * patterns.any
- end
- if balanced and s ~= e then
- return P {
- s * (range + V(1))^0 * e
- }
- else
- return s * range^0 * P(e)^-1
- end
-end
-
-function lexers.starts_line(patt) -- unchanged
- return P ( function(input, index)
- if index == 1 then
- return index
- end
- local char = sub(input,index - 1,index - 1)
- if char == "\n" or char == "\r" or char == "\f" then
- return index
- end
- end ) * patt
-end
-
-function lexers.last_char_includes(s) -- unchanged
- s = "[" .. gsub(s,"[-%%%[]", "%%%1") .. "]"
- return P ( function(input, index)
- if index == 1 then
- return index
- end
- local i = index
- while match(sub(input,i - 1,i - 1),"[ \t\r\n\f]") do
- i = i - 1
- end
- if match(sub(input,i - 1,i - 1),s) then
- return index
- end
- end)
-end
-
-function lexers.nested_pair(start_chars, end_chars) -- unchanged
- local s = start_chars
- local e = P(end_chars)^-1
- return P {
- s * (patterns.any - s - end_chars + V(1))^0 * e
- }
-end
-
-local function prev_line_is_comment(prefix, text, pos, line, s) -- unchanged
- local start = find(line,"%S")
- if start < s and not find(line,prefix,start,true) then
- return false
- end
- local p = pos - 1
- if sub(text,p,p) == "\n" then
- p = p - 1
- if sub(text,p,p) == "\r" then
- p = p - 1
- end
- if sub(text,p,p) ~= "\n" then
- while p > 1 and sub(text,p - 1,p - 1) ~= "\n"
- do p = p - 1
- end
- while find(sub(text,p,p),"^[\t ]$") do
- p = p + 1
- end
- return sub(text,p,p + #prefix - 1) == prefix
- end
- end
- return false
-end
-
-local function next_line_is_comment(prefix, text, pos, line, s)
- local p = find(text,"\n",pos + s)
- if p then
- p = p + 1
- while find(sub(text,p,p),"^[\t ]$") do
- p = p + 1
- end
- return sub(text,p,p + #prefix - 1) == prefix
- end
- return false
-end
-
-function lexers.fold_line_comments(prefix)
- local property_int = lexers.property_int
- return function(text, pos, line, s)
- if property_int["fold.line.comments"] == 0 then
- return 0
- end
- if s > 1 and match(line,"^%s*()") < s then
- return 0
- end
- local prev_line_comment = prev_line_is_comment(prefix, text, pos, line, s)
- local next_line_comment = next_line_is_comment(prefix, text, pos, line, s)
- if not prev_line_comment and next_line_comment then
- return 1
- end
- if prev_line_comment and not next_line_comment then
- return -1
- end
- return 0
- end
-end
-
--- There are some fundamental changes in textadept version 10 and I don't want to
--- adapt again so we go the reverse route: map new to old. This is needed because
--- we need to load other lexers which is teh result of not being able to load the
--- lexer framework in parallel. Something happened in 10 that makes the main lexer
--- always enforced so now we need to really replace that one (and even then it loads
--- twice (i can probably sort that out). Maybe there's now some hard coded magic
--- in the binary.
-
-if textadept then
-
- -- Folds are still somewhat weak because of the end condition not being
- -- bound to a start .. probably to complex and it seems to work anyhow. As
- -- we have extended thinsg we just remap.
-
- local function add_fold_point(lexer,token_name,start_symbol,end_symbol)
- if type(start_symbol) == "string" then
- local foldsymbols = lexer._foldsymbols
- if not foldsymbols then
- foldsymbols = { }
- lexer._foldsymbols = foldsymbols
- end
- local patterns = foldsymbols._patterns
- if not patterns then
- patterns = { }
- usedpatt = { } -- > 10 uses a mixed index/hash (we don't use patterns)
- foldsymbols._patterns = patterns
- foldsymbols._usedpatt = usedpatt
- end
- local foldsymbol = foldsymbols[token_name]
- if not foldsymbol then
- foldsymbol = { }
- foldsymbols[token_name] = foldsymbol
- end
- if not usedpatt[start_symbol] then
- patterns[#patterns+1] = start_symbol
- usedpatt[start_symbol] = true
- end
- if type(end_symbol) == "string" then
- foldsymbol[start_symbol] = 1
- foldsymbol[end_symbol] = -1
- if not usedpatt[end_symbol] then
- patterns[#patterns+1] = end_symbol
- usedpatt[end_symbol] = true
- end
- else
- foldsymbol[start_symbol] = end_symbol
- end
- end
- end
-
- local function add_style(lexer,name,style)
- local tokenstyles = lexer._tokenstyles
- if not tokenstyles then
- tokenstyles = { }
- lexer._tokenstyles = tokenstyles
- end
- tokenstyles[name] = style
- end
-
- local function add_rule(lexer,id,rule)
- local rules = lexer._rules
- if not rules then
- rules = { }
- lexer._rules = rules
- end
- rules[#rules+1] = { id, rule }
- end
-
- local function modify_rule(lexer,id,rule) -- needed for textadept > 10
- if lexer._lexer then
- lexer = lexer._lexer
- end
- local RULES = lexer._RULES
- if RULES then
- RULES[id] = rule
- end
- end
-
- local function get_rule(lexer,id) -- needed for textadept > 10
- if lexer._lexer then
- lexer = lexer._lexer
- end
- local RULES = lexer._RULES
- if RULES then
- return RULES[id]
- end
- end
-
- local new = context.new
- local lmt = {
- __index = {
-
- add_rule = add_rule,
- modify_rule = modify_rule,
- get_rule = get_rule,
- add_style = add_style,
- add_fold_point = add_fold_point,
-
- join_tokens = join_tokens,
- build_grammar = build_grammar,
-
- embed = lexers.embed,
- lex = lexers.lex,
- fold = lexers.fold
-
- }
- }
-
- function lexers.new(name,options)
- local lexer = new(name)
- if options then
- lexer._LEXBYLINE = options['lex_by_line']
- lexer._FOLDBYINDENTATION = options['fold_by_indentation']
- lexer._CASEINSENSITIVEFOLDPOINTS = options['case_insensitive_fold_points']
- lexer._lexer = options['inherit']
- end
- setmetatable(lexer,lmt)
- return lexer
- end
-
-end
-
--- done
-
-return lexers
diff --git a/context/data/textadept/context/lexers/text.lua b/context/data/textadept/context/lexers/text.lua
deleted file mode 100644
index 5d3096b7d..000000000
--- a/context/data/textadept/context/lexers/text.lua
+++ /dev/null
@@ -1,35 +0,0 @@
-local info = {
- version = 1.002,
- comment = "scintilla lpeg lexer that triggers whitespace backtracking",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- the lexer dll doesn't backtrack when there is no embedded lexer so
--- we need to trigger that, for instance in the bibtex lexer, but still
--- we get failed lexing
-
-local lexer = require("scite-context-lexer")
-local context = lexer.context
-local patterns = context.patterns
-
-local token = lexer.token
-
-local dummylexer = lexer.new("dummy","scite-context-lexer-dummy")
-local whitespace = dummylexer.whitespace
-
-local space = patterns.space
-local nospace = (1-space)
-
-local t_spacing = token(whitespace, space ^1)
-local t_rest = token("default", nospace^1)
-
-dummylexer._rules = {
- { "whitespace", t_spacing },
- { "rest", t_rest },
-}
-
-dummylexer._tokenstyles = context.styleset
-
-return dummylexer
diff --git a/context/data/textadept/context/modules/textadept-context-files.lua b/context/data/textadept/context/modules/textadept-context-files.lua
deleted file mode 100644
index ef016372e..000000000
--- a/context/data/textadept/context/modules/textadept-context-files.lua
+++ /dev/null
@@ -1,826 +0,0 @@
-local info = {
- version = 1.002,
- comment = "file handler for textadept for context/metafun",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-local lexer = require("scite-context-lexer")
-local context = lexer.context
-
-local char, format, gsub = string.char, string.format, string.gsub
-
--- What is _CHARSET doing ... I don't want any messing with conversion at all. Scite is
--- more clever with e.g. pdf. How can I show non ascii as escapes.
-
--- I'll add a few more keybindings. I mostly made this file for my own use and as a
--- fallback for SciTE as it runs on all os's. It might evolve ... who knows.
-
-io.encodings = {
- "UTF-8",
- "ASCII",
- "UTF-16",
-}
-
--- We need this for for instance pdf files (faster too):
-
-local sevenbitascii = { }
-for i=127,255 do
- sevenbitascii[char(i)] = format("0x%02X",i)
-end
-
-local function setsevenbitascii(buffer)
- -- we cannot directly assign sevenbitascii to buffer
- local representation = buffer.representation
- for k, v in next, sevenbitascii do
- representation[k] = v
- end
-end
-
--- Here we rebind keys. For this we need to load the alternative runner framework. I will
--- probably change the menu.
-
-local oldrunner = textadept.run
-local runner = require("textadept-context-runner")
-
--- local function userunner(runner)
--- --
--- keys [OSX and 'mr' or 'cr' ] = runner.process or runner.run
--- keys [OSX and 'mR' or (GUI and 'cR' or 'cmr')] = runner.check or runner.compile
--- keys [OSX and 'mB' or (GUI and 'cB' or 'cmb')] = runner.preview or runner.build
--- keys [OSX and 'mX' or (GUI and 'cX' or 'cmx')] = runner.quit or runner.stop
--- --
--- textadept.menu.menubar [_L['_Tools']] [_L['_Run']] [2] = runner.process or runner.run
--- textadept.menu.menubar [_L['_Tools']] [_L['_Compile']] [2] = runner.check or runner.compile
--- textadept.menu.menubar [_L['_Tools']] [_L['Buil_d']] [2] = runner.preview or runner.build
--- textadept.menu.menubar [_L['_Tools']] [_L['S_top']] [2] = runner.quit or runner.stop
--- --
--- end
-
--- I played a while with supporting both systems alongsize but getting the menus
--- synchronized is a real pain and semi-random. So, I decided to just drop the
--- old. And I don't want to implement a full variant now. Anyway, after that
--- conclusion I decided to replace not only the tools menu.
-
-local SEPARATOR = { "" }
-local newmenu = { }
-local newkeys = { }
-
-do
-
- newmenu.file = {
-
- title = "_File",
-
- { "_New", buffer.new },
- { "_Open", io.open_file },
- { "Open _Recent", io.open_recent_file },
- { "Re_load", io.reload_file },
- { "_Save", io.save_file },
- { "Save _As", io.save_file_as },
- { "Save All", io.save_all_files },
-
- SEPARATOR,
-
- { "_Close", io.close_buffer },
- { "Close All", io.close_all_buffers },
-
- -- SEPARATOR,
-
- -- { "Loa_d Session", textadept.session.load },
- -- { "Sav_e Session", textadept.session.save },
-
- SEPARATOR,
-
- { _L["_Quit"], quit },
-
- }
-
- -- maybe just the same keys on all ... or duplicate on osx
-
- newkeys[OSX and "mn" or "cn"] = buffer.new
- newkeys[OSX and "mo" or "co"] = io.open_file
- newkeys[OSX and "cmo" or "cao"] = io.open_recent_file
- newkeys[OSX and "ms" or "cs"] = io.save_file
- newkeys[OSX and "mS" or "cS"] = io.save_file_as
- newkeys[OSX and "mw" or "cw"] = io.close_buffer
- newkeys[OSX and "mW" or "cW"] = io.close_all_buffers
-
-end
-
-do
-
- local function complete_symbol()
- textadept.editing.autocomplete(buffer:get_lexer(true))
- end
-
- newmenu.edit = {
-
- title = "_Edit",
-
- SEPARATOR,
-
- { "_Undo", buffer.undo },
- { "_Redo", buffer.redo },
-
- SEPARATOR,
-
- { "Cu_t", buffer.cut },
- { "_Copy", buffer.copy },
- { "_Paste", buffer.paste },
- { "_Delete", buffer.clear },
- { "Select _All", buffer.select_all },
-
- SEPARATOR,
-
- { "Duplicate _Line", buffer.line_duplicate },
-
- SEPARATOR,
-
- { "Toggle _Block Comment", runner.blockcomment },
- { "_Upper Case Selection", buffer.upper_case },
- { "_Lower Case Selection", buffer.lower_case },
-
- }
-
- newkeys[OSX and "mz" or "cz"] = buffer.undo
- newkeys[OSX and "my" or "cy"] = buffer.redo
- newkeys[OSX and "mx" or "cx"] = buffer.cut
- newkeys[OSX and "mc" or "cc"] = buffer.copy
- newkeys[OSX and "mv" or "cv"] = buffer.paste
- newkeys[OSX and "ma" or "ca"] = buffer.select_all
-
- newkeys[OSX and "mD" or "cD"] = textadept.editing.select_word
- newkeys[OSX and "mN" or "cN"] = textadept.editing.select_line
- newkeys[OSX and "mP" or "cP"] = textadept.editing.select_paragraph
-
- newkeys["del"] = buffer.clear
- newkeys["cy"] = buffer.redo
-
- newkeys[OSX and "md" or "cd"] = buffer.line_duplicate
- newkeys[OSX and "cu" or "cau"] = buffer.upper_case
- newkeys[OSX and "cU" or "caU"] = buffer.lower_case
-
- newkeys[OSX and "mq" or "cq"] = runner.blockcomment
-
- newkeys[OSX and "ml" or "cl"] = buffer.line_delete
-
- -- Do I ever use these?
-
- -- newkeys["cf"] = buffer.char_right
- -- newkeys["cF"] = buffer.char_right_extend
- -- newkeys["cmf"] = buffer.word_right
- -- newkeys["cmF"] = buffer.word_right_extend
- -- newkeys["cb"] = buffer.char_left
- -- newkeys["cB"] = buffer.char_left_extend
- -- newkeys["cmb"] = buffer.word_left
- -- newkeys["cmB"] = buffer.word_left_extend
- -- newkeys["cn"] = buffer.line_down
- -- newkeys["cN"] = buffer.line_down_extend
- -- newkeys["cp"] = buffer.line_up
- -- newkeys["cP"] = buffer.line_up_extend
- -- newkeys["ca"] = buffer.vc_home
- -- newkeys["cA"] = buffer.vc_home_extend
- -- newkeys["ce"] = buffer.line_end
- -- newkeys["cE"] = buffer.line_end_extend
- -- newkeys["aright"] = buffer.word_right
- -- newkeys["aleft"] = buffer.word_left
- -- newkeys["cdv"] = buffer.clear
- -- newkeys["ck"] = function() buffer:line_end_extend() buffer:cut() end
- -- newkeys["cl"] = buffer.vertical_centre_caret
-
- newkeys.fn = OSX and function() return true end or nil
-
- newkeys[OSX and "c@" or "c "] = complete_symbol
-
-end
-
-do
-
- local function find_in_file()
- ui.find.in_files = false
- ui.find.focus()
- end
-
- local function find_in_files()
- ui.find.in_files = true
- ui.find.focus()
- end
-
- local function find_next_in_files()
- ui.find.goto_file_found(false,true)
- end
-
- local function find_previous_in_files()
- ui.find.goto_file_found(false,false)
- end
-
- newmenu.search = {
-
- title = "_Search",
-
- SEPARATOR,
-
- { "_Find", find_in_file },
- { "Find _Next", ui.find.find_next },
- { "Find _Previous", ui.find.find_prev },
- { "_Replace", ui.find.replace },
- { "Replace _All", ui.find.replace_all },
- { "Find _Incremental", ui.find.find_incremental },
-
- SEPARATOR,
-
- { "Find in Fi_les", find_in_files },
- { "Goto Nex_t File Found", find_next_in_files },
- { "Goto Previou_s File Found", find_previous_in_files },
-
- SEPARATOR,
-
- { "_Jump to", textadept.editing.goto_line }
-
- }
-
- -- The few times I use osx I want the same keys ... better explicitly handle
- -- "not GUI" but I have to test the curses version first anyway.
-
- newkeys[OSX and "mf" or "cf"] = find_in_file
- newkeys[OSX and "mg" or "cg"] = ui.find.find_next
- newkeys[OSX and "mG" or "cG"] = ui.find.find_prev
- newkeys[OSX and "mg" or "cg"] = textadept.editing.goto_line
-
- newkeys["f3"] = not OSX and ui.find.find_next or nil
- newkeys["sf3"] = not OSX and ui.find.find_prev or nil
-
- newkeys[OSX and "cr" or "car"] = ui.find.replace
- newkeys[OSX and "cR" or "caR"] = ui.find.replace_all
- newkeys[OSX and "cmf" or "caf"] = ui.find.find_incremental
-
- newkeys[OSX and "mF" or "cF"] = find_in_files
- newkeys[OSX and "cmg" or "cag"] = find_next_in_files
- newkeys[OSX and "cmG" or "caG"] = find_previous_in_files
-
-end
-
-do
-
- io.quick_open_max = 5000
-
- local function isdir(path)
- return path and path ~= "" and lfs.attributes(path,"mode") == "directory"
- end
-
- local function resolveroot(path)
- local path = runner.resultof("mtxrun --resolve-path TEXMFCONTEXT")
- if path then
- return string.match(path,"(.-)%s$")
- end
- end
-
- local function opencurrentdirectory()
- local path = buffer.filename
- if path and path ~= "" then
- path = string.match(path,"^(.+)[/\\]")
- if isdir(path) then
- io.quick_open(path)
- end
- end
- end
-
- local function openuserdirectory()
- local path = resolveroot("TEXMFPROJECT")
- if isdir(path) then
- io.quick_open(path .. "/tex/context/user")
- end
- end
-
- local function openbasedirectory()
- local path = resolveroot("TEXMFCONTEXT")
- if isdir(path) then
- io.quick_open(path .. "/tex/context/base/mkiv")
- end
- end
-
- local started = false
-
- local function startservice()
- if WIN32 then
- os.execute([[cmd /c start /min "Context Documentation" mtxrun --script server --auto]])
- else
- os.execute([[mtxrun --script server --start > ~/context-wwwserver.log &]])
- end
- started = true
- end
-
- local function showcommand()
- -- if not started then
- -- startservice()
- -- end
- local start = buffer.selection_n_start[0]
- local stop = buffer.selection_n_end[0]
- if start == stop then
- buffer:set_target_range(buffer:word_start_position(start,true),buffer:word_end_position(stop,true))
- else
- buffer:set_target_range(start,stop)
- end
- local word = buffer.target_text
- os.execute(format([[mtxrun --gethelp --url="http://localhost:8088/mtx-server-ctx-help.lua?command=%s"]],word or ""))
- end
-
- newmenu.tools = {
-
- title = "_Tools",
-
- SEPARATOR,
-
- { "Check Source", runner.check },
- { "Process Source", runner.process },
- { "Preview Result", runner.preview },
- { "Show Log File", runner.install("logfile") },
- { "Quit", runner.quit },
-
- SEPARATOR,
-
- { "Open Current Directory", opencurrentdirectory },
- { "Open User Directory", openuserdirectory },
- { "Open Base Directory", openbasedirectory },
-
- SEPARATOR,
-
- { "Purge Files", runner.install("purge") },
- { "Clear Cache", runner.install("clear") },
- { "Generate File Database", runner.install("generate") },
- { "Generate Font Database", runner.install("fonts") },
-
- SEPARATOR,
-
- { "Typeset Listing", runner.install("listing") },
- { "Process and Arrange", runner.install("arrange") },
-
- SEPARATOR,
-
- { "Start Document Service", startservice },
- { "Goto Document Service", showcommand },
-
- SEPARATOR,
-
- { "Show Unicodes", runner.unicodes },
-
- -- We need this bogus entry because otherwise we get a message due to macros.lua. I
- -- might need to come up with some proper placeholder. Well, let's for now just
- -- live with the crash.
-
- -- SEPARATOR,
- --
- -- { "Select Co_mmand", textadept.editing.goto_line },
-
-}
-
- -- newkeys[OSX and "mc" or "cc"] = runner.check
- newkeys[OSX and "mr" or "cr"] = runner.process
- newkeys[OSX and "mp" or "cp"] = runner.preview
- -- newkeys[OSX and "mx" or "cx"] = runner.quit -- makes no sense
-
- newkeys["f7"] = runner.process
- newkeys["f12"] = runner.process
-
- newkeys["f2"] = runner.unicodes
-
-end
-
-do
-
- local function use_tabs()
- buffer.use_tabs = not buffer.use_tabs
- events.emit(events.UPDATE_UI) -- for updating statusbar
- end
-
- local function set_eol_mode_crlf()
- set_eol_mode(buffer.EOL_CRLF)
- end
-
- local function set_eol_mode_lf()
- set_eol_mode(buffer.EOL_LF)
- end
-
- local function show_eol()
- buffer.view_eol = not buffer.view_eol
- end
-
- local function wrap_mode()
- buffer.wrap_mode = buffer.wrap_mode == 0 and buffer.WRAP_WHITESPACE or 0
- end
-
- function show_white_space()
- buffer.view_ws = buffer.view_ws == 0 and buffer.WS_VISIBLEALWAYS or 0
- end
-
- local function update_lexing()
- buffer:colourise(0,-1)
- end
-
- function set_endoding_utf8()
- set_encoding("UTF-8")
- end
-
- function set_encoding_ascii()
- set_encoding("ASCII")
- end
-
- function set_endoding_utf16le()
- set_encoding("UTF-16LE")
- end
-
- function set_endoding_utf16Be()
- set_encoding("UTF-16BE")
- end
-
- function goto_prev_buffer()
- view:goto_buffer(-1)
- end
-
- function goto_next_buffer()
- view:goto_buffer(1)
- end
-
- newmenu.buffer = {
-
- title = "_Buffer",
-
- SEPARATOR,
-
- { "_Previous Buffer", goto_prev_buffer },
- { "_Next Buffer", goto_next_buffer },
- { "_Switch to Buffer", ui.switch_buffer },
-
- SEPARATOR,
-
- { "_Toggle Use Tabs", use_tabs },
- {
- title = "EOL Mode",
-
- { "_CRLF", set_eol_mode_crlf },
- { "_LF", set_eol_mode_lf },
- },
- {
- title = "Encoding",
-
- { "_ASCII", set_encoding_ascii },
- { "_UTF-8", set_encoding_utf8 },
- { "UTF-16-_BE", set_encoding_utf16le },
- { "UTF-16-_LE", set_encoding_utf16be },
- },
-
- SEPARATOR,
-
- { "Toggle View _EOL", show_eol },
- { "Toggle _Wrap Mode", wrap_mode },
- { "Toggle View _Spacing", show_whitespace },
-
- SEPARATOR,
-
- { "Select _Lexer", textadept.file_types.select_lexer },
- { "Refresh _Syntax Highlighting", update_lexing }
-
- }
-
- newkeys["f5"] = update_lexing
-
- newkeys[OSX and "mp" or "cs\t"] = goto_prev_buffer
- newkeys[OSX and "mn" or "c\t"] = goto_next_buffer
- newkeys[OSX and "mb" or "cb"] = ui.switch_buffer
-
-end
-
-do
-
- local function toggle_current_fold()
- buffer:toggle_fold(buffer:line_from_position(buffer.current_pos))
- end
-
- local function toggle_show_guides()
- local off = buffer.indentation_guides == 0
- buffer.indentation_guides = off and buffer.IV_LOOKBOTH or 0
- end
-
- local function toggle_virtual_space()
- local off = buffer.virtual_space_options == 0
- buffer.virtual_space_options = off and buffer.VS_USERACCESSIBLE or 0
- end
-
- local function reset_zoom()
- buffer.zoom = 0
- end
-
- newmenu.view = {
-
- title = "_View",
-
- SEPARATOR,
-
- { "Toggle Current _Fold" , toggle_current_fold },
-
- SEPARATOR,
-
- { "Toggle Show In_dent Guides", toggle_show_guides },
- { "Toggle _Virtual Space", toggle_virtual_space },
-
- SEPARATOR,
-
- { "Zoom _In", buffer.zoom_in },
- { "Zoom _Out", buffer.zoom_out },
- { "_Reset Zoom", reset_zoom },
-
- }
-
- newkeys[OSX and "m=" or "c="] = buffer.zoom_in
- newkeys[OSX and "m-" or "c-"] = buffer.zoom_out
- newkeys[OSX and "m0" or "c0"] = reset_zoom
-
-end
-
-do
-
- -- It"s a pitt y that we can"t have a proper monospaced font here so we try to make the best of it:
-
- local template = "\n\trelease info: %s\t\n\n\tcopyright: %s\t\n\n\tvariant: ConTeXt related editing\t\n\n\tadapted by: Hans Hagen\t"
-
- function show_about()
- ui.dialogs.msgbox {
- title = "about",
- informative_text = format(template,(gsub(_RELEASE,"%s+"," ")),(gsub(_COPYRIGHT,"%s+"," ")))
- }
- end
-
- local function open_url(url) -- adapted from non public open_page
- local cmd = (WIN32 and 'start ""') or (OSX and "open") or "xdg-open"
- os.spawn(format('%s "%s"', cmd, url))
- end
-
-
- newmenu.help = {
-
- title = "_Help",
-
- SEPARATOR,
-
- { "ConTeXt garden wiki", function() open_url("http://www.contextgarden.net") end },
-
- -- SEPARATOR,
-
- { "_About", show_about }
-
- }
-
-end
-
-do
-
- -- from shift F11 and startup script
-
- table.insert(textadept.menu.context_menu, SEPARATOR)
-
- table.insert(textadept.menu.context_menu, { "wrap", runner.wrap })
- table.insert(textadept.menu.context_menu, { "unwrap", runner.unwrap })
- table.insert(textadept.menu.context_menu, { "sort", runner.sort })
- -- table.insert(textadept.menu.context_menu, { "document", function() end })
- -- table.insert(textadept.menu.context_menu, { "quote", function() end })
- -- table.insert(textadept.menu.context_menu, { "compound", function() end })
- -- table.insert(textadept.menu.context_menu, { "add", function() end })
- -- table.insert(textadept.menu.context_menu, { "bidi", function() end })
- -- table.insert(textadept.menu.context_menu, { "strip", function() end })
-
-
- -- cM wrap
- -- cR reset spelling
- -- cI insert template
- -- cE show log
- -- c+ toggle strip
-
- newkeys[OSX and "mm" or "cm"] = runner.wrap
-
-end
-
-do
-
- local function replace(oldmenu,newmenu)
- local n = #newmenu
- local o = #oldmenu
- for i=1,n do
- oldmenu[i] = newmenu[i]
- end
- for i=o,n+1,-1 do
- oldmenu[i] = nil
- end
- end
-
- replace(textadept.menu.menubar [_L["_File"]], newmenu.file)
- replace(textadept.menu.menubar [_L["_Edit"]], newmenu.edit)
- replace(textadept.menu.menubar [_L["_Search"]], newmenu.search)
- replace(textadept.menu.menubar [_L["_Tools"]], newmenu.tools)
- replace(textadept.menu.menubar [_L["_Buffer"]], newmenu.buffer)
- replace(textadept.menu.menubar [_L["_View"]], newmenu.view)
- replace(textadept.menu.menubar [_L["_Help"]], newmenu.help)
-
- local char = string.char
-
- local combi = {
- "c", "m", "a",
- "cm", "ca", "ma",
- }
-
- local pad = {
- "esc", "del", "bs",
- "up", "down", "left", "right",
- "end", "home",
- "f1", "f2", "f3", "f4", "f5", "f6", "f7", "f8", "f9", "f10", "f11", "f12",
- }
-
- local s = "s"
-
- for i=1,#combi do
- local c = combi[i]
- for i=0x20,0x40 do
- local ci = char(i)
- keys[ c..ci] = nil
- keys[s..c..ci] = nil
- end
- for i=0x41,0x5A do -- A .. Z
- local ci = char(i)
- keys[ c..ci] = nil
- end
- for i=0x5B,0x60 do
- local ci = char(i)
- keys[ c..ci] = nil
- keys[s..c..ci] = nil
- end
- for i=0x61,0x7A do -- a .. z
- local ci = char(i)
- keys[ c..ci] = nil
- end
- for i=0x7B,0x7F do
- local ci = char(i)
- keys[ c..ci] = nil
- keys[s..c..ci] = nil
- end
- for i=1,#pad do
- local pi = pad[i]
- keys[ c..pi] = nil
- keys[s..c..pi] = nil
- end
- end
-
- for k, v in next, newkeys do
- keys[k] = v
- end
-
- -- add helper to textadept-context-runner.lua instead
-
- events.connect(events.INITIALIZED, function()
- for i=1,#_BUFFERS do
- local buffer = _BUFFERS[i]
- if buffer._type == OUTPUT_BUFFER then
- view:goto_buffer(i)
- io.close_buffer()
- return
- end
- end
- end)
-
-end
-
--- We have a different way to set up files and runners. Less distributed and morein the way we
--- do things in context.
-
-local dummyrunner = function() end
-local extensions = textadept.file_types.extensions
-local specifications = runner.specifications
-local setters = { }
-local defaults = {
- check = dummyrunner,
- process = dummyrunner,
- preview = dummyrunner,
-}
-
-setmetatable(specifications, { __index = defaults })
-
-function context.install(specification)
- local suffixes = specification.suffixes
- if suffixes then
- local lexer = specification.lexer
- local setter = specification.setter
- local encoding = specification.encoding
- for i=1,#suffixes do
- local suffix = suffixes[i]
- if lexer and extensions then
- extensions[suffix] = lexer
- end
- specifications[suffix] = specification
- if lexer then
- setters[lexer] = function()
- if encoding == "7-BIT-ASCII" then
- setsevenbitascii(buffer)
- end
- if setter then
- setter(lexer)
- end
- end
- end
- end
- end
-end
-
--- Too much interference so I might drop all the old stuff eventually.
-
-local function synchronize(lexer)
- if lexer then
- local setter = lexer and setters[lexer]
- if setter then
- local action = context.synchronize
- if action then
- action()
- end
- -- userunner()
- setter(lexer)
- else
- -- useoldrunner()
- end
- end
-end
-
-events.connect(events.FILE_OPENED,function(filename)
- synchronize(buffer:get_lexer())
-end)
-
-events.connect(events.LEXER_LOADED,function(lexer)
- synchronize(lexer)
-end)
-
--- obsolete
-
--- events.connect(events.BUFFER_AFTER_SWITCH,function()
--- synchronize(buffer:get_lexer())
--- end)
-
--- events.connect(events.VIEW_AFTER_SWITCH,function()
--- synchronize(buffer:get_lexer())
--- end)
-
--- events.connect(events.BUFFER_NEW,function()
--- synchronize(buffer:get_lexer())
--- end)
-
--- events.connect(events.VIEW_NEW,function()
--- synchronize(buffer:get_lexer())
--- end)
-
--- events.connect(events.RESET_AFTER,function()
--- synchronize(buffer:get_lexer())
--- end)
-
--- local oldtools = { }
--- local usingold = false
--- local toolsmenu = textadept.menu.menubar [_L['_Tools']]
---
--- for i=1,#toolsmenu do
--- oldtools[i] = toolsmenu[i]
--- end
---
--- local function replace(tools)
--- local n = #toolsmenu
--- local m = #tools
--- for i=1,m do
--- toolsmenu[i] = tools[i]
--- end
--- for i=n,m+1,-1 do
--- toolsmenu[i] = nil
--- end
--- end
---
--- local function useoldrunner()
--- if not usingold then
--- keys [OSX and 'mr' or 'cr' ] = oldrunner.run
--- keys [OSX and 'mR' or (GUI and 'cR' or 'cmr')] = oldrunner.compile
--- keys [OSX and 'mB' or (GUI and 'cB' or 'cmb')] = oldrunner.build
--- keys [OSX and 'mX' or (GUI and 'cX' or 'cmx')] = oldrunner.stop
--- --
--- replace(oldtools)
--- --
--- usingold = true
--- end
--- end
---
--- local function userunner()
--- if usingold then
--- keys [OSX and 'mr' or 'cr' ] = runner.process
--- keys [OSX and 'mR' or (GUI and 'cR' or 'cmr')] = runner.check
--- keys [OSX and 'mB' or (GUI and 'cB' or 'cmb')] = runner.preview
--- keys [OSX and 'mX' or (GUI and 'cX' or 'cmx')] = runner.quit
--- --
--- replace(newtools)
--- --
--- usingold = false
--- end
--- end
---
--- userunner()
diff --git a/context/data/textadept/context/modules/textadept-context-runner.lua b/context/data/textadept/context/modules/textadept-context-runner.lua
deleted file mode 100644
index 1181b13a3..000000000
--- a/context/data/textadept/context/modules/textadept-context-runner.lua
+++ /dev/null
@@ -1,1100 +0,0 @@
-local info = {
- version = 1.002,
- comment = "prototype textadept runner for context/metafun",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- This is an adapted version of the run code by mitchell.att.foicica.corunner. The main
--- reason I started patching is that long lines got broken in the middle so we needed
--- to have a more clever line splitter that saves half of a line for later. Then I
--- decided to come up with a few more variants so in the end ... it's just too tempting
--- make something that exactly suits out needs. In fact, maybe I'll do that some day:
--- take core textadept and make a dedicated variant for the kind of processing that we
--- do and make it suitable for document authors (the manual says that is doable). In that
--- case I can also use a lot of already written helpers.
---
--- The error scanner is not needed. If I need one, it will be using a lexers applied
--- afterwards because working on half lines is not going to work out well anyway.
---
--- Here I removed iconv calls as in context we use utf (less hassle with fonts too). One
--- can always use the original approach.
---
--- The events seems to have hard coded names, Also, the name of the message buffer cannot
--- be changes because otherwise we get a message when the session is restored. I don't
--- care about locales.
---
--- Somehow the process hangs when I refresh the pdf viewer, this doesn't happen in scite so
--- the underlying code is for the moment less reliant.
-
-local match, gsub, find, format, gmatch, rep = string.match, string.gsub, string.find, string.format, string.gmatch, string.rep
-local char, lower, upper, sub = string.char, string.lower, string.upper, string.sub
-local concat, sort = table.concat, table.sort
-local assert, type = assert, type
-
-local original = textadept.run
-local runner = { }
-
-runner.MARK_WARNING = original.MARK_WARNING
-runner.MARK_ERROR = original.MARK_ERROR
-
-local specifications = { }
-runner.specifications = specifications
-
------ RUNNER_EVENT = "[Context Runner]"
-local OUTPUT_BUFFER = '[Message Buffer]' -- CONSOLE
-
------ events.RUNNER_EVENT = RUNNER_EVENT
-
-local currentprocess = nil
-local xbuffer = nil
-
-local function find_buffer(buffer_type)
- for i=1,#_BUFFERS do
- local buffer = _BUFFERS[i]
- if buffer._type == buffer_type then
- return buffer
- end
- end
-end
-
-local function print_output(str)
- local print_buffer = find_buffer(OUTPUT_BUFFER)
- -- some simplified magic copied from the adeptext runner
- if not print_buffer then
- if not ui.tabs then
- view:split()
- end
- print_buffer = buffer.new()
- print_buffer._type = OUTPUT_BUFFER
- events.emit(events.FILE_OPENED)
- else
- for i=1,#_VIEWS do
- local view = _VIEWS[i]
- if view.buffer._type == OUTPUT_BUFFER then
- ui.goto_view(view)
- break
- end
- end
- if view.buffer._type ~= OUTPUT_BUFFER then
- view:goto_buffer(print_buffer)
- end
- end
- print_buffer:append_text(str)
- print_buffer:goto_pos(buffer.length)
- print_buffer:set_save_point()
- return true -- quits
-end
-
-local function trace_output(str)
- xbuffer = buffer
- print_output(str)
- if xbuffer then
- view:goto_buffer(xbuffer)
- end
-end
-
-local function clear_output()
- xbuffer = buffer
- local print_buffer = find_buffer(OUTPUT_BUFFER)
- if print_buffer then
- print_buffer:clear_all()
- end
-end
-
-local function is_output(buffer)
- return buffer._type == OUTPUT_BUFFER
-end
-
--- Instead of events we will have out own interceptors so that we don't have
--- interference. The main problem is that we don't have much control over the
--- order. If we have much actions I can always come up with something.
-
--- The textadept console seems a bit slower than the one in scite (which does some
--- output pane parsing so it could be even faster). Maybe it relates to the way
--- the program is run. Scite provides some more control over this. It might have
--- to do with the way tex pipes to the console, because from a simple lua run it's
--- quite fast. Maybe calling cmd is not optimal. Anyhow, it means that for now I
--- should not use textadept when running performance test that need to compare with
--- the past.
-
-local function process(buffer,filename,action)
- if not filename then
- filename = buffer.filename
- end
- if not filename then
- return
- end
- if filename == buffer.filename then
- buffer:annotation_clear_all() -- needed ?
- io.save_file()
- end
- if filename == "" then
- return
- end
- local suffix = match(filename,'[^/\\.]+$')
- local specification = specifications[suffix]
- if not specification then
- return
- end
- local action = specification[action]
- local quitter = nil
- if type(action) == "table" then
- action = action.command
- quitter = action.quitter
- end
- if type(action) ~= "string" then
- return
- end
- clear_output()
- local pathpart = ''
- local basename = filename
- if find(filename,'[/\\]') then
- pathpart, basename = match(filename,'^(.+[/\\])([^/\\]+)$')
- end
- -- beter strip one from the end
- local nameonly = match(basename,'^(.+)%.')
- -- more in sync which what we normally do (i'd rather use the ctx template mechanism)
- local command = gsub(action,'%%(.-)%%', {
- filename = filename,
- pathname = dirname,
- dirname = dirname,
- pathpart = dirname,
- basename = basename,
- nameonly = nameonly,
- suffix = suffix,
- selection = function() return match(buffer.get_sel_text(),"%s*([A-Za-z]+)") end,
- })
- -- for fun i'll add a ansi escape sequence lexer some day
- local function emit_output(output)
- print_output(output) -- events.emit(RUNNER_EVENT,...)
- -- afaik there is no way to check if we're waiting for input (no input callback)
- if quitter then
- local quit, message = quitter(interceptor)
- if quit then
- if message then
- print_output(format("\n\n> quit: %s\n",message))
- end
- runner.quit()
- end
- end
- end
- local function exit_output(status)
- print_output(format("\n\n> exit: %s, press esc to return to source\n",status)) -- events.emit(RUNNER_EVENT,...)
- end
- print_output(format("> command: %s\n",command)) -- events.emit(RUNNER_EVENT,...)
- currentprocess = assert(os.spawn(command, pathpart, emit_output, emit_output, exit_output))
-end
-
-function runner.install(name)
- return function(filename)
- process(buffer,filename,name)
- end
-end
-
-runner.check = runner.install("check")
-runner.process = runner.install("process")
-runner.preview = runner.install("preview")
-
-function runner.resultof(command) -- from l-os.lua
- local handle = io.popen(command,"r")
- if handle then
- local result = handle:read("*all") or ""
- handle:close()
- return result
- else
- return ""
- end
-end
-
-function runner.quit()
- if currentprocess then
- assert(currentprocess:kill())
- end
-end
-
-local function char_added(code)
- if code == 10 and currentprocess and currentprocess:status() == 'running' and buffer._type == OUTPUT_BUFFER then
- local line_num = buffer:line_from_position(buffer.current_pos) - 1
- currentprocess:write((buffer:get_line(line_num)))
- end
- return true -- quits
-end
-
-function runner.goto_error(line, next)
- -- see original code for how to do it
-end
-
-local function key_press(code)
- if xbuffer and keys.KEYSYMS[code] == 'esc' then
- view:goto_buffer(xbuffer)
- return true
- end
-end
-
-local function double_click()
- if xbuffer and is_output(buffer) then
- view:goto_buffer(xbuffer)
- return true
- end
-end
-
---
-
-local l2 = char(0xC0)
-local l3 = char(0xE0)
-local l4 = char(0xF0)
-
-local function utflen(str)
- local n = 0
- local l = 0
- for s in gmatch(str,".") do
- if l > 0 then
- l = l - 1
- else
- n = n + 1
- if s >= l4 then
- l = 3
- elseif s >= l3 then
- l = 2
- elseif s >= l2 then
- l = 1
- end
- end
- end
- return n
-end
-
-local function prepare()
- local startposition = buffer.selection_start
- local endposition = buffer.selection_end
-
- if startposition == endposition then return end
-
- buffer.current_pos = startposition
- buffer:home()
-
- buffer.current_pos = endposition
- buffer:line_end_extend()
-
- local firstposition = buffer.selection_start
- local lastposition = buffer.selection_end
-
- local firstline = buffer:line_from_position(startposition)
- local lastline = buffer:line_from_position(endposition)
-
- local startcolumn = startposition - firstposition
- local endcolumn = lastposition - endposition + 1
- local selection = buffer:get_sel_text()
-
- -- trace_output(firstposition .. " " .. startposition .. "\n")
- -- trace_output(endposition .. " " .. lastposition .. "\n")
-
- return startposition, endposition, firstposition, lastposition, startcolumn, endcolumn, firstline, lastline, selection
-end
-
-local function replace(startposition,lastposition,replacement)
- if type(replacement) == "table" then
- replacement = concat(replacement,"\n")
- end
- -- trace_output(replacement .. "\n")
-
- buffer.current_pos = startposition
-
- buffer:begin_undo_action()
- buffer:set_target_range(startposition,lastposition)
- buffer:replace_target(replacement)
- buffer:end_undo_action()
-
- buffer.selection_start = startposition
- buffer.selection_end = startposition
-end
-
--- This is old code, from my early lua days, so not that nice and optimal, but
--- no one sees it and performance is irrelevant here.
-
-local magicstring = rep("<ctx-crlf/>", 2)
-
-function runner.wrap()
-
- local startposition, endposition, firstposition, lastposition, startcolumn, endcolumn, firstline, lastline, selection = prepare()
-
- if not startposition then
- return
- end
-
- local wraplength = buffer.wrap_length
- local length = tonumber(wraplength) or 80
- local replacement = { }
- local templine = ""
- local tempsize = 0
- local indentation = rep(' ',startcolumn)
-
- selection = gsub(selection,"[\n\r][\n\r]","\n")
- selection = gsub(selection,"\n\n+"," " .. magicstring .. " ")
- selection = gsub(selection,"^%s",'')
-
- for snippet in gmatch(selection,"%S+") do
- if snippet == magicstring then
- replacement[#replacement+1] = templine
- replacement[#replacement+1] = ""
- templine = ""
- tempsize = 0
- else
- local snipsize = utflen(snippet)
- if tempsize + snipsize > length then
- replacement[#replacement+1] = templine
- templine = indentation .. snippet
- tempsize = startcolumn + snipsize
- elseif tempsize == 0 then
- templine = indentation .. snippet
- tempsize = tempsize + startcolumn + snipsize
- else
- templine = templine .. " " .. snippet
- tempsize = tempsize + 1 + snipsize
- end
- end
- end
-
- replacement[#replacement+1] = templine
- replacement[1] = gsub(replacement[1],"^%s+","")
-
- if endcolumn == 0 then
- replacement[#replacement+1] = ""
- end
-
- replace(startposition,lastposition,replacement)
-
-end
-
-local magicstring = rep("<multiplelines/>", 2)
-
-function runner.unwrap()
-
- local startposition, endposition, firstposition, lastposition, startcolumn, endcolumn, selection, firstline, lastline = prepare()
-
- if not startposition then
- return
- end
-
- startposition = firstposition
- endposition = lastposition
-
- local selection = gsub(selection,"[\n\r][\n\r]+", " " .. magicstring .. " ")
- local replacement = { }
-
- for snippet in gmatch(selection,"%S+") do
- replacement[#replacement+1] = snippet == magicstring and "" or snippet
- end
-
- if endcolumn == 0 then
- replacement[#replacement+1] = ""
- end
-
- replace(startposition,lastposition,replacement)
-
-end
-
--- This is real old crappy code which doesn't really pass my current qa standards but
--- it does the job so ... (hashing the blobs would work ok).
-
-local function grab(str,delimiter)
- local list = { }
- for snippet in gmatch(str,delimiter) do
- list[#list+1] = snippet
- end
- return list
-end
-
-local function alphacmp_yes(a,b)
- return lower(gsub(sub(a,i),"0"," ")) < lower(gsub(sub(b,i),"0"," "))
-end
-
-local function alphacmp_nop(a,b)
- return lower(a) < lower(b)
-end
-
-local function alphasort(list,i)
- sort(list,i and i > 0 and alphacmp_yes or alphacmp_nop)
-end
-
-function runner.sort()
-
- local startposition, endposition, firstposition, lastposition, startcolumn, endcolumn, firstline, lastline, selection = prepare()
-
- if not startposition then
- return
- end
-
- startposition = firstposition
- endposition = lastposition
-
- local list = grab(selection,"[^\n\r]+")
-
- alphasort(list,startcolumn)
-
- if endcolumn == 0 then
- list[#list+1] = ""
- end
-
- replace(startposition,lastposition,list)
-
-end
-
--- Tricky: we can't reset an event (because we need to know the function which is
--- local. So, a first solution injected a false into the table which will trigger
--- a break and then I found out that returning true has the same effect. Then I
--- found out that we can have our own events and next decided not to use them at
--- all.
-
--- events.connect(events.RUNNER_EVENT, print_output, 1)
-
-events.connect(events.CHAR_ADDED, char_added, 1)
-events.connect(events.KEYPRESS, key_press, 1)
-events.connect(events.DOUBLE_CLICK, double_click, 1)
-
--- We need to get rid of the crash due to macros.lua event crash in
---
--- -- textadept.menu.menubar[_L['_Tools']][_L['Select Co_mmand']][2],
-
--- for i=1,#_VIEWS do
--- if _VIEWS[i].buffer._type == "[Message Buffer]" then
--- ui.goto_view(_VIEWS[i])
--- buffer.current_pos = buffer.current_pos
--- io.close_buffer()
--- break
--- end
--- end
--- for i = 1, #_BUFFERS do
--- if _BUFFERS[i]._type == "[Message Buffer]" then
--- view:goto_buffer(_BUFFERS[i])
--- buffer.current_pos = buffer.current_pos
--- io.close_buffer()
--- break
--- end
--- end
-
--- I don't want the indentation. I also want an extra space which in turn means
--- a more extensive test. I also don't care about a suffix. Adapted a bit to
--- match the code above.
-
-function runner.blockcomment()
- local buffer = buffer
- local comment = textadept.editing.comment_string[buffer:get_lexer(true)]
-
- if not comment or comment == "" then
- return
- end
-
- local prefix = comment:match('^([^|]+)|?([^|]*)$')
- local usedprefix = prefix
-
- if not prefix then
- return
- end
-
- if not find(prefix,"%s$") then
- usedprefix = prefix .. " "
- end
-
- local n_prefix = #prefix
- local n_usedprefix = #usedprefix
-
- local startposition = buffer.selection_start
- local endposition = buffer.selection_end
- local firstline = buffer:line_from_position(startposition)
- local lastline = buffer:line_from_position(endposition)
-
- if firstline ~= lastline and endposition == buffer:position_from_line(lastline) then
- lastline = lastline - 1
- end
-
- startposition = buffer.line_end_position[startposition] - startposition
- endposition = buffer.length - endposition
-
- buffer:begin_undo_action()
-
- for line=firstline,lastline do
- local p = buffer:position_from_line(line)
- if buffer:text_range(p, p + n_usedprefix) == usedprefix then
- buffer:delete_range(p, n_usedprefix)
- elseif buffer:text_range(p, p + n_prefix) == prefix then
- buffer:delete_range(p, n_prefix)
- else
- buffer:insert_text(p, usedprefix)
- end
- end
-
- buffer:end_undo_action()
-
- startposition = buffer.line_end_position[firstline] - startposition
- endposition = buffer.length - endposition
-
- -- whatever ...
-
- local start_pos = buffer:position_from_line(firstline)
-
- if start_pos > startposition then
- startposition = start_pos
- end
- if start_pos > endposition then
- endposition = start_pos
- end
-
- if firstline ~= lastline then
- buffer:set_sel(startposition, endposition)
- else
- buffer:goto_pos(endposition)
- end
-end
-
--- This only works partially as for some reason scite shows proper math symbols while
--- here we don't see them. I need to look into that.
-
-local textlists = { -- taken from sort-lan.lua
- en = {
- "a", "b", "c", "d", "e", "f", "g", "h", "i", "j",
- "k", "l", "m", "n", "o", "p", "q", "r", "s", "t",
- "u", "v", "w", "x", "y", "z",
-
- "A", "B", "C", "D", "E", "F", "G", "H", "I", "J",
- "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T",
- "U", "V", "W", "X", "Y", "Z",
- },
- nl = {
- "a", "b", "c", "d", "e", "f", "g", "h", "i", "j",
- "k", "l", "m", "n", "o", "p", "q", "r", "s", "t",
- "u", "v", "w", "x", "y", "z",
-
- "A", "B", "C", "D", "E", "F", "G", "H", "I", "J",
- "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T",
- "U", "V", "W", "X", "Y", "Z",
- },
- fr = {
- "a", "æ", "b", "c", "ç", "d", "e", "è", "é", "ê",
- "f", "g", "h", "i", "j", "k", "l", "m", "n", "o",
- "p", "q", "r", "s", "t", "u", "v", "w", "x", "y",
- "z",
-
- "A", "Æ", "B", "C", "Ç", "D", "E", "È", "É", "Ê",
- "F", "G", "H", "I", "J", "K", "L", "M", "N", "O",
- "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y",
- "Z",
-
- },
- de = {
- "a", "ä", "b", "c", "d", "e", "f", "g", "h", "i",
- "j", "k", "l", "m", "n", "o", "ö", "p", "q", "r",
- "s", "ß", "t", "u", "ü", "v", "w", "x", "y", "z",
-
- "A", "Ä", "B", "C", "D", "E", "F", "G", "H", "I",
- "J", "K", "L", "M", "N", "O", "Ö", "P", "Q", "R",
- "S", "SS", "T", "U", "Ü", "V", "W", "X", "Y", "Z",
- },
- fi = { -- finish
- "a", "b", "c", "d", "e", "f", "g", "h", "i", "j",
- "k", "l", "m", "n", "o", "p", "q", "r", "s", "t",
- "u", "v", "w", "x", "y", "z", "å", "ä", "ö",
-
- "A", "B", "C", "D", "E", "F", "G", "H", "I", "J",
- "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T",
- "U", "V", "W", "X", "Y", "Z", "Å", "Ä", "Ö",
- },
- sl = { -- slovenian
- "a", "b", "c", "č", "ć", "d", "đ", "e", "f", "g", "h", "i",
- "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "š", "t",
- "u", "v", "w", "x", "y", "z", "ž",
-
- "A", "B", "C", "Č", "Ć", "D", "Đ", "E", "F", "G", "H", "I",
- "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "Š", "T",
- "U", "V", "W", "X", "Y", "Z", "Ž",
- },
- ru = { -- rusian
- "а", "б", "в", "г", "д", "е", "ё", "ж", "з", "и",
- "і", "й", "к", "л", "м", "н", "о", "п", "р", "с",
- "т", "у", "ф", "х", "ц", "ч", "ш", "щ", "ъ", "ы",
- "ь", "ѣ", "э", "ю", "я", "ѳ", "ѵ",
-
- "А", "Б", "В", "Г", "Д", "Е", "Ё", "Ж", "З", "И",
- "І", "Й", "К", "Л", "М", "Н", "О", "П", "Р", "С",
- "Т", "У", "Ф", "Х", "Ц", "Ч", "Ш", "Щ", "Ъ", "Ы",
- "Ь", "Ѣ", "Э", "Ю", "Я", "Ѳ", "Ѵ",
- },
- uk = { -- ukraninuan
- "а", "б", "в", "г", "ґ", "д", "е", "є", "ж", "з", "и", "і",
- "ї", "й", "к", "л", "м", "н", "о", "п", "р", "с", "т", "у",
- "ф", "х", "ц", "ч", "ш", "щ", "ь", "ю", "я",
-
- "А", "Б", "В", "Г", "Ґ", "Д", "Е", "Є", "Ж", "З", "И", "І",
- "Ї", "Й", "К", "Л", "М", "Н", "О", "П", "Р", "С", "Т", "У",
- "Ф", "Х", "Ц", "Ч", "Ш", "Щ", "Ь", "Ю", "Я",
- },
- be = { -- belarusia
- "а", "б", "в", "г", "д", "е", "ё", "ж", "з", "і",
- "й", "к", "л", "м", "н", "о", "п", "р", "с", "т",
- "у", "ў", "ф", "х", "ц", "ч", "ш", "ы", "ь", "э",
- "ю", "я",
-
- "А", "Б", "В", "Г", "Д", "Е", "Ё", "Ж", "З", "І",
- "Й", "К", "Л", "М", "Н", "О", "П", "Р", "С", "Т",
- "У", "Ў", "Ф", "Х", "Ц", "Ч", "Ш", "Ы", "Ь", "Э",
- "Ю", "Я",
- },
- bg = { -- bulgarian
- "а", "б", "в", "г", "д", "е", "ж", "з","и", "й",
- "к", "a", "л", "a", "м", "н", "о", "п", "р", "с",
- "т", "у", "ф", "х", "ц", "ч", "ш", "щ", "ъ", "ь",
- "ю", "я",
-
- "А", "Б", "В", "Г", "Д", "Е", "Ж", "З","И", "Й",
- "К", "A", "Л", "A", "М", "Н", "О", "П", "Р", "С",
- "Т", "У", "Ф", "Х", "Ц", "Ч", "Ш", "Щ", "Ъ", "Ь",
- "Ю", "Я",
- },
- pl = { -- polish
- "a", "ą", "b", "c", "ć", "d", "e", "ę", "f", "g",
- "h", "i", "j", "k", "l", "ł", "m", "n", "ń", "o",
- "ó", "p", "q", "r", "s", "ś", "t", "u", "v", "w",
- "x", "y", "z", "ź", "ż",
-
- "A", "Ą", "B", "C", "Ć", "D", "E", "Ę", "F", "G",
- "H", "I", "J", "K", "L", "Ł", "M", "N", "Ń", "O",
- "Ó", "P", "Q", "R", "S", "Ś", "T", "U", "V", "W",
- "X", "Y", "Z", "Ź", "Ż",
- },
- cz = { -- czech
- "a", "á", "b", "c", "č", "d", "ď", "e", "é", "ě",
- "f", "g", "h", "i", "í", "j", "k", "l", "m",
- "n", "ň", "o", "ó", "p", "q", "r", "ř", "s", "š",
- "t", "ť", "u", "ú", "ů", "v", "w", "x", "y", "ý",
- "z", "ž",
-
- "A", "Á", "B", "C", "Č", "D", "Ď", "E", "É", "Ě",
- "F", "G", "H", "I", "Í", "J", "K", "L", "M",
- "N", "Ň", "O", "Ó", "P", "Q", "R", "Ř", "S", "Š",
- "T", "Ť", "U", "Ú", "Ů", "V", "W", "X", "Y", "Ý",
- "Z", "Ž",
- },
- sk = { -- slovak
- "a", "á", "ä", "b", "c", "č", "d", "ď",
- "e", "é", "f", "g", "h", ch, "i", "í", "j", "k",
- "l", "ĺ", "ľ", "m", "n", "ň", "o", "ó", "ô", "p",
- "q", "r", "ŕ", "s", "š", "t", "ť", "u", "ú", "v",
- "w", "x", "y", "ý", "z", "ž",
-
- "A", "Á", "Ä", "B", "C", "Č", "D", "Ď",
- "E", "É", "F", "G", "H", "I", "Í", "J", "K",
- "L", "Ĺ", "Ľ", "M", "N", "Ň", "O", "Ó", "Ô", "P",
- "Q", "R", "Ŕ", "S", "Š", "T", "Ť", "U", "Ú", "V",
- "W", "X", "Y", "Ý", "Z", "Ž",
- },
- hr = { -- croatian
- "a", "b", "c", "č", "ć", "d", "đ", "e", "f",
- "g", "h", "i", "j", "k", "l", "m", "n",
- "o", "p", "r", "s", "š", "t", "u", "v", "z", "ž",
-
- "A", "B", "C", "Č", "Ć", "D", "Đ", "E", "F",
- "G", "H", "I", "J", "K", "L", "M", "N",
- "O", "P", "R", "S", "Š", "T", "U", "V", "Z", "Ž",
- },
- sr = { -- serbian
- "а", "б", "в", "г", "д", "ђ", "е", "ж", "з", "и",
- "ј", "к", "л", "љ", "м", "н", "њ", "о", "п", "р",
- "с", "т", "ћ", "у", "ф", "х", "ц", "ч", "џ", "ш",
-
- "А", "Б", "В", "Г", "Д", "Ђ", "Е", "Ж", "З", "И",
- "Ј", "К", "Л", "Љ", "М", "Н", "Њ", "О", "П", "Р",
- "С", "Т", "Ћ", "У", "Ф", "Х", "Ц", "Ч", "Џ", "Ш",
- },
- no = { -- norwegian
- "a", "b", "c", "d", "e", "f", "g", "h", "i", "j",
- "k", "l", "m", "n", "o", "p", "q", "r", "s", "t",
- "u", "v", "w", "x", "y", "z", "æ", "ø", "å",
-
- "A", "B", "C", "D", "E", "F", "G", "H", "I", "J",
- "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T",
- "U", "V", "W", "X", "Y", "Z", "Æ", "Ø", "Å",
- },
- da = { --danish
- "a", "b", "c", "d", "e", "f", "g", "h", "i", "j",
- "k", "l", "m", "n", "o", "p", "q", "r", "s", "t",
- "u", "v", "w", "x", "y", "z", "æ", "ø", "å",
-
- "A", "B", "C", "D", "E", "F", "G", "H", "I", "J",
- "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T",
- "U", "V", "W", "X", "Y", "Z", "Æ", "Ø", "Å",
- },
- sv = { -- swedish
- "a", "b", "c", "d", "e", "f", "g", "h", "i", "j",
- "k", "l", "m", "n", "o", "p", "q", "r", "s", "t",
- "u", "v", "w", "x", "y", "z", "å", "ä", "ö",
-
- "A", "B", "C", "D", "E", "F", "G", "H", "I", "J",
- "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T",
- "U", "V", "W", "X", "Y", "Z", "Å", "Ä", "Ö",
- },
- is = { -- islandic
- "a", "á", "b", "d", "ð", "e", "é", "f", "g", "h",
- "i", "í", "j", "k", "l", "m", "n", "o", "ó", "p",
- "r", "s", "t", "u", "ú", "v", "x", "y", "ý", "þ",
- "æ", "ö",
-
- "A", "Á", "B", "D", "Ð", "E", "É", "F", "G", "H",
- "I", "Í", "J", "K", "L", "M", "N", "O", "Ó", "P",
- "R", "S", "T", "U", "Ú", "V", "X", "Y", "Ý", "Þ",
- "Æ", "Ö",
- },
- -- gr = { -- greek
- -- "α", "ά", "ὰ", "ᾶ", "ᾳ", "ἀ", "ἁ", "ἄ", "ἂ", "ἆ",
- -- "ἁ", "ἅ", "ἃ", "ἇ", "ᾁ", "ᾴ", "ᾲ", "ᾷ", "ᾄ", "ᾂ",
- -- "ᾅ", "ᾃ", "ᾆ", "ᾇ", "β", "γ", "δ", "ε", "έ", "ὲ",
- -- "ἐ", "ἔ", "ἒ", "ἑ", "ἕ", "ἓ", "ζ", "η", "η", "ή",
- -- "ὴ", "ῆ", "ῃ", "ἠ", "ἤ", "ἢ", "ἦ", "ᾐ", "ἡ", "ἥ",
- -- "ἣ", "ἧ", "ᾑ", "ῄ", "ῂ", "ῇ", "ᾔ", "ᾒ", "ᾕ", "ᾓ",
- -- "ᾖ", "ᾗ", "θ", "ι", "ί", "ὶ", "ῖ", "ἰ", "ἴ", "ἲ",
- -- "ἶ", "ἱ", "ἵ", "ἳ", "ἷ", "ϊ", "ΐ", "ῒ", "ῗ", "κ",
- -- "λ", "μ", "ν", "ξ", "ο", "ό", "ὸ", "ὀ", "ὄ", "ὂ",
- -- "ὁ", "ὅ", "ὃ", "π", "ρ", "ῤ", "ῥ", "σ", "ς", "τ",
- -- "υ", "ύ", "ὺ", "ῦ", "ὐ", "ὔ", "ὒ", "ὖ", "ὑ", "ὕ",
- -- "ὓ", "ὗ", "ϋ", "ΰ", "ῢ", "ῧ", "φ", "χ", "ψ", "ω",
- -- "ώ", "ὼ", "ῶ", "ῳ", "ὠ", "ὤ", "ὢ", "ὦ", "ᾠ", "ὡ",
- -- "ὥ", "ὣ", "ὧ", "ᾡ", "ῴ", "ῲ", "ῷ", "ᾤ", "ᾢ", "ᾥ",
- -- "ᾣ", "ᾦ", "ᾧ",
- --
- -- "Α", "Ά", "Ὰ", "Α͂", "Ἀ", "Ἁ", "Ἄ", "Ἂ", "Ἆ",
- -- "Ἁ", "Ἅ", "Ἃ", "Ἇ",
- -- "Β", "Γ", "Δ", "Ε", "Έ", "Ὲ",
- -- "Ἐ", "Ἔ", "Ἒ", "Ἑ", "Ἕ", "Ἓ", "Ζ", "Η", "Η", "Ή",
- -- "Ὴ", "Η͂", "Ἠ", "Ἤ", "Ἢ", "Ἦ", "Ἡ", "Ἥ",
- -- "Ἣ", "Ἧ",
- -- "Θ", "Ι", "Ί", "Ὶ", "Ι͂", "Ἰ", "Ἴ", "Ἲ",
- -- "Ἶ", "Ἱ", "Ἵ", "Ἳ", "Ἷ", "Ϊ", "Ϊ́", "Ϊ̀", "Ϊ͂", "Κ",
- -- "Λ", "Μ", "Ν", "Ξ", "Ο", "Ό", "Ὸ", "Ὀ", "Ὄ", "Ὂ",
- -- "Ὁ", "Ὅ", "Ὃ", "Π", "Ρ", "Ρ̓", "Ῥ", "Σ", "Σ", "Τ",
- -- "Υ", "Ύ", "Ὺ", "Υ͂", "Υ̓", "Υ̓́", "Υ̓̀", "Υ̓͂", "Ὑ", "Ὕ",
- -- "Ὓ", "Ὗ", "Ϋ", "Ϋ́", "Ϋ̀", "Ϋ͂", "Φ", "Χ", "Ψ", "Ω",
- -- "Ώ", "Ὼ", "Ω͂", "Ὠ", "Ὤ", "Ὢ", "Ὦ", "Ὡ",
- -- "Ὥ", "Ὣ", "Ὧ",
- -- },
- gr = { -- greek
- "α", "β", "γ", "δ", "ε", "ζ", "η", "θ", "ι", "κ",
- "λ", "μ", "ν", "ξ", "ο", "π", "ρ", "ς", "τ", "υ",
- "φ", "χ", "ψ", "ω",
-
- "Α", "Β", "Γ", "Δ", "Ε", "Ζ", "Η", "Θ", "Ι", "Κ",
- "Λ", "Μ", "Ν", "Ξ", "Ο", "Π", "Ρ", "Σ", "Τ", "Υ",
- "Χ", "Ψ", "Ω",
- },
- la = { -- latin
- "a", "ā", "ă", "b", "c", "d", "e", "ē", "ĕ", "f",
- "g", "h", "i", "ī", "ĭ", "j", "k", "l", "m", "n",
- "o", "ō", "ŏ", "p", "q", "r", "s", "t", "u", "ū",
- "ŭ", "v", "w", "x", "y", "ȳ", "y̆", "z", "æ",
-
- "A", "Ā", "Ă", "B", "C", "D", "E", "Ē", "Ĕ", "F",
- "G", "H", "I", "Ī", "Ĭ", "J", "K", "L", "M", "N",
- "O", "Ō", "Ŏ", "P", "Q", "R", "S", "T", "U", "Ū",
- "Ŭ", "V", "W", "X", "Y", "Ȳ", "Y̆", "Z", "Æ",
- },
- it = { -- italian
- "a", "á", "b", "c", "d", "e", "é", "è", "f", "g",
- "h", "i", "í", "ì", "j", "k", "l", "m", "n", "o",
- "ó", "ò", "p", "q", "r", "s", "t", "u", "ú", "ù",
- "v", "w", "x", "y", "z",
-
- "A", "Á", "B", "C", "D", "E", "É", "È", "F", "G",
- "H", "I", "Í", "Ì", "J", "K", "L", "M", "N", "O",
- "Ó", "Ò", "P", "Q", "R", "S", "T", "U", "Ú", "Ù",
- "V", "W", "X", "Y", "Z",
- },
- ro = { -- romanian
- "a", "ă", "â", "b", "c", "d", "e", "f", "g", "h",
- "i", "î", "j", "k", "l", "m", "n", "o", "p", "q",
- "r", "s", "ș", "t", "ț", "u", "v", "w", "x", "y",
- "z",
-
- "A", "Ă", "Â", "B", "C", "D", "E", "F", "G", "H",
- "I", "Î", "J", "K", "L", "M", "N", "O", "P", "Q",
- "R", "S", "Ș", "T", "Ț", "U", "V", "W", "X", "Y",
- "Z",
- },
- es = { -- spanish
- "a", "á", "b", "c", "d", "e", "é", "f", "g", "h",
- "i", "í", "j", "k", "l", "m", "n", "ñ", "o", "ó",
- "p", "q", "r", "s", "t", "u", "ú", "ü", "v", "w",
- "x", "y", "z",
-
- "A", "Á", "B", "C", "D", "E", "É", "F", "G", "H",
- "I", "Í", "J", "K", "L", "M", "N", "Ñ", "O", "Ó",
- "P", "Q", "R", "S", "T", "U", "Ú", "Ü", "V", "W",
- "X", "Y", "Z",
- },
- pt = { -- portuguese
- "a", "á", "â", "ã", "à", "b", "c", "ç", "d", "e",
- "é", "ê", "f", "g", "h", "i", "í", "j", "k", "l",
- "m", "n", "o", "ó", "ô", "õ", "p", "q", "r", "s",
- "t", "u", "ú", "ü", "v", "w", "x", "y", "z",
-
- "A", "Á", "Â", "Ã", "À", "B", "C", "Ç", "D", "E",
- "É", "Ê", "F", "G", "H", "I", "Í", "J", "K", "L",
- "M", "N", "O", "Ó", "Ô", "Õ", "P", "Q", "R", "S",
- "T", "U", "Ú", "Ü", "V", "W", "X", "Y", "Z",
- },
- lt = { -- lithuanian
- "a", "ą", "b", "c", "ch", "č", "d", "e", "ę", "ė",
- "f", "g", "h", "i", "į", "y", "j", "k", "l", "m",
- "n", "o", "p", "r", "s", "š", "t", "u", "ų", "ū",
- "v", "z", "ž",
-
- "A", "Ą", "B", "C", "CH", "Č", "D", "E", "Ę", "Ė",
- "F", "G", "H", "I", "Į", "Y", "J", "K", "L", "M",
- "N", "O", "P", "R", "S", "Š", "T", "U", "Ų", "Ū",
- "V", "Z", "Ž",
- },
- lv = { -- latvian
- "a", "ā", "b", "c", "č", "d", "e", "ē", "f", "g",
- "ģ", "h", "i", "ī", "j", "k", "ķ", "l", "ļ", "m",
- "n", "ņ", "o", "ō", "p", "r", "ŗ", "s", "š", "t",
- "u", "ū", "v", "z", "ž",
-
- "A", "Ā", "B", "C", "Č", "D", "E", "Ē", "F", "G",
- "Ģ", "H", "I", "Ī", "J", "K", "Ķ", "L", "Ļ", "M",
- "N", "Ņ", "O", "Ō", "P", "R", "Ŗ", "S", "Š", "T",
- "U", "Ū", "V", "Z", "Ž",
- },
- hu = { -- hungarian
- "a", "á", "b", "c", "d", "e", "é",
- "f", "g", "h", "i", "í", "j", "k", "l",
- "m", "n", "o", "ó", "ö", "ő", "p", "q", "r",
- "s", "t", "u", "ú", "ü", "ű", "v", "w",
- "x", "y", "z",
-
- "A", "Á", "B", "C", "D", "E", "É",
- "F", "G", "H", "I", "Í", "J", "K", "L",
- "M", "N", "O", "Ó", "Ö", "Ő", "P", "Q", "R",
- "S", "T", "U", "Ú", "Ü", "Ű", "V", "W",
- "X", "Y", "Z",
- },
- et = { -- estonian
- "a", "b", "d", "e", "f", "g", "h", "i", "j", "k",
- "l", "m", "n", "o", "p", "r", "s", "š", "z", "ž",
- "t", "u", "v", "w", "õ", "ä", "ö", "ü", "x", "y",
-
- "A", "B", "D", "E", "F", "G", "H", "I", "J", "K",
- "L", "M", "N", "O", "P", "R", "S", "Š", "Z", "Ž",
- "T", "U", "V", "W", "Õ", "Ä", "Ö", "Ü", "X", "Y",
- },
- -- jp = { -- japanese
- -- "あ", "い", "う", "え", "お", "か", "き", "く", "け", "こ",
- -- "さ", "し", "す", "せ", "そ", "た", "ち", "つ", "て", "と",
- -- "な", "に", "ぬ", "ね", "の", "は", "ひ", "ふ", "へ", "ほ",
- -- "ま", "み", "む", "め", "も", "や", "ゆ", "よ",
- -- "ら", "り", "る", "れ", "ろ", "わ", "ゐ", "ゑ", "を", "ん",
- -- },
-}
-
-local textselector = { }
-for k, v in next, textlists do
- textselector[#textselector+1] = k
-end
-sort(textselector)
-
-local mathsets = {
- { "tf", {
- "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z",
- "A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z",
- "0", "1", "2", "3", "4", "5", "6", "7", "8", "9"
- }, },
- { "bf", {
- "𝐛", "𝐜", "𝐝", "𝐞", "𝐟", "𝐠", "𝐡", "𝐢", "𝐣", "𝐤", "𝐥", "𝐦", "𝐧", "𝐨", "𝐩", "𝐪", "𝐫", "𝐬", "𝐭", "𝐮", "𝐯", "𝐰", "𝐱", "𝐲", "𝐳",
- "𝐀", "𝐁", "𝐂", "𝐃", "𝐄", "𝐅", "𝐆", "𝐇", "𝐈", "𝐉", "𝐊", "𝐋", "𝐌", "𝐍", "𝐎", "𝐏", "𝐐", "𝐑", "𝐒", "𝐓", "𝐔", "𝐕", "𝐖", "𝐗", "𝐘", "𝐙", "𝐚",
- "𝟎", "𝟏", "𝟐", "𝟑", "𝟒", "𝟓", "𝟔", "𝟕", "𝟖", "𝟗"
- }, },
- { "it", {
- "𝑎", "𝑏", "𝑐", "𝑑", "𝑒", "𝑓", "𝑔", "ℎ", "𝑖", "𝑗", "𝑘", "𝑙", "𝑚", "𝑛", "𝑜", "𝑝", "𝑞", "𝑟", "𝑠", "𝑡", "𝑢", "𝑣", "𝑤", "𝑥", "𝑦", "𝑧",
- "𝐴", "𝐵", "𝐶", "𝐷", "𝐸", "𝐹", "𝐺", "𝐻", "𝐼", "𝐽", "𝐾", "𝐿", "𝑀", "𝑁", "𝑂", "𝑃", "𝑄", "𝑅", "𝑆", "𝑇", "𝑈", "𝑉", "𝑊", "𝑋", "𝑌", "𝑍",
- }, },
- { "bi", {
- "𝒂", "𝒃", "𝒄", "𝒅", "𝒆", "𝒇", "𝒈", "𝒉", "𝒊", "𝒋", "𝒌", "𝒍", "𝒎", "𝒏", "𝒐", "𝒑", "𝒒", "𝒓", "𝒔", "𝒕", "𝒖", "𝒗", "𝒘", "𝒙", "𝒚", "𝒛",
- "𝑨", "𝑩", "𝑪", "𝑫", "𝑬", "𝑭", "𝑮", "𝑯", "𝑰", "𝑱", "𝑲", "𝑳", "𝑴", "𝑵", "𝑶", "𝑷", "𝑸", "𝑹", "𝑺", "𝑻", "𝑼", "𝑽", "𝑾", "𝑿", "𝒀", "𝒁",
- }, },
- { "sc", {
- "𝒵", "𝒶", "𝒷", "𝒸", "𝒹", "ℯ", "𝒻", "ℊ", "𝒽", "𝒾", "𝒿", "𝓀", "𝓁", "𝓂", "𝓃", "ℴ", "𝓅", "𝓆", "𝓇", "𝓈", "𝓉", "𝓊", "𝓋", "𝓌", "𝓍", "𝓎", "𝓏",
- "𝒜", "ℬ", "𝒞", "𝒟", "ℰ", "ℱ", "𝒢", "ℋ", "ℐ", "𝒥", "𝒦", "ℒ", "ℳ", "𝒩", "𝒪", "𝒫", "𝒬", "ℛ", "𝒮", "𝒯", "𝒰", "𝒱", "𝒲", "𝒳", "𝒴",
- }, },
- { "sc bf", {
- "𝓪", "𝓫", "𝓬", "𝓭", "𝓮", "𝓯", "𝓰", "𝓱", "𝓲", "𝓳", "𝓴", "𝓵", "𝓶", "𝓷", "𝓸", "𝓹", "𝓺", "𝓻", "𝓼", "𝓽", "𝓾", "𝓿", "𝔀", "𝔁", "𝔂", "𝔃",
- "𝓐", "𝓑", "𝓒", "𝓓", "𝓔", "𝓕", "𝓖", "𝓗", "𝓘", "𝓙", "𝓚", "𝓛", "𝓜", "𝓝", "𝓞", "𝓟", "𝓠", "𝓡", "𝓢", "𝓣", "𝓤", "𝓥", "𝓦", "𝓧", "𝓨", "𝓩",
- }, },
- { "fr", {
- "𝔞", "𝔟", "𝔠", "𝔡", "𝔢", "𝔣", "𝔤", "𝔥", "𝔦", "𝔧", "𝔨", "𝔩", "𝔪", "𝔫", "𝔬", "𝔭", "𝔮", "𝔯", "𝔰", "𝔱", "𝔲", "𝔳", "𝔴", "𝔵", "𝔶", "𝔷",
- "𝔄", "𝔅", "ℭ", "𝔇", "𝔈", "𝔉", "𝔊", "ℌ", "ℑ", "𝔍", "𝔎", "𝔏", "𝔐", "𝔑", "𝔒", "𝔓", "𝔔", "ℜ", "𝔖", "𝔗", "𝔘", "𝔙", "𝔚", "𝔛", "𝔜", "ℨ",
- }, },
- { "ds", {
- "𝕓", "𝕔", "𝕕", "𝕖", "𝕗", "𝕘", "𝕙", "𝕚", "𝕛", "𝕜", "𝕝", "𝕞", "𝕟", "𝕠", "𝕡", "𝕢", "𝕣", "𝕤", "𝕥", "𝕦", "𝕧", "𝕨", "𝕩", "𝕪", "𝕫",
- "𝔸", "𝔹", "ℂ", "𝔻", "𝔼", "𝔽", "𝔾", "ℍ", "𝕀", "𝕁", "𝕂", "𝕃", "𝕄", "ℕ", "𝕆", "ℙ", "ℚ", "ℝ", "𝕊", "𝕋", "𝕌", "𝕍", "𝕎", "𝕏", "𝕐", "ℤ", "𝕒",
- "𝟘", "𝟙", "𝟚", "𝟛", "𝟜", "𝟝", "𝟞", "𝟟", "𝟠", "𝟡"
- }, },
- { "fr bf", {
- "𝕬", "𝕭", "𝕮", "𝕯", "𝕰", "𝕱", "𝕲", "𝕳", "𝕴", "𝕵", "𝕶", "𝕷", "𝕸", "𝕹", "𝕺", "𝕻", "𝕼", "𝕽", "𝕾", "𝕿", "𝖀", "𝖁", "𝖂", "𝖃",
- "𝖄", "𝖅", "𝖆", "𝖇", "𝖈", "𝖉", "𝖊", "𝖋", "𝖌", "𝖍", "𝖎", "𝖏", "𝖐", "𝖑", "𝖒", "𝖓", "𝖔", "𝖕", "𝖖", "𝖗", "𝖘", "𝖙", "𝖚", "𝖛", "𝖜", "𝖝", "𝖞", "𝖟"
- }, },
- { "ss tf", {
- "𝖺", "𝖻", "𝖼", "𝖽", "𝖾", "𝖿", "𝗀", "𝗁", "𝗂", "𝗃", "𝗄", "𝗅", "𝗆", "𝗇", "𝗈", "𝗉", "𝗊", "𝗋", "𝗌", "𝗍", "𝗎", "𝗏", "𝗐", "𝗑", "𝗒", "𝗓",
- "𝖠", "𝖡", "𝖢", "𝖣", "𝖤", "𝖥", "𝖦", "𝖧", "𝖨", "𝖩", "𝖪", "𝖫", "𝖬", "𝖭", "𝖮", "𝖯", "𝖰", "𝖱", "𝖲", "𝖳", "𝖴", "𝖵", "𝖶", "𝖷", "𝖸", "𝖹",
- "𝟢", "𝟣", "𝟤", "𝟥", "𝟦", "𝟧", "𝟨", "𝟩", "𝟪", "𝟫"
- }, },
- { "ss bf", {
- "𝗮", "𝗯", "𝗰", "𝗱", "𝗲", "𝗳", "𝗴", "𝗵", "𝗶", "𝗷", "𝗸", "𝗹", "𝗺", "𝗻", "𝗼", "𝗽", "𝗾", "𝗿", "𝘀", "𝘁", "𝘂", "𝘃", "𝘄", "𝘅", "𝘆", "𝘇",
- "𝗔", "𝗕", "𝗖", "𝗗", "𝗘", "𝗙", "𝗚", "𝗛", "𝗜", "𝗝", "𝗞", "𝗟", "𝗠", "𝗡", "𝗢", "𝗣", "𝗤", "𝗥", "𝗦", "𝗧", "𝗨", "𝗩", "𝗪", "𝗫", "𝗬", "𝗭",
- "𝟬", "𝟭", "𝟮", "𝟯", "𝟰", "𝟱", "𝟲", "𝟳", "𝟴", "𝟵",
- }, },
- { "ss it", {
- "𝘢", "𝘣", "𝘤", "𝘥", "𝘦", "𝘧", "𝘨", "𝘩", "𝘪", "𝘫", "𝘬", "𝘭", "𝘮", "𝘯", "𝘰", "𝘱", "𝘲", "𝘳", "𝘴", "𝘵", "𝘶", "𝘷", "𝘸", "𝘹", "𝘺", "𝘻",
- "𝘈", "𝘉", "𝘊", "𝘋", "𝘌", "𝘍", "𝘎", "𝘏", "𝘐", "𝘑", "𝘒", "𝘓", "𝘔", "𝘕", "𝘖", "𝘗", "𝘘", "𝘙", "𝘚", "𝘛", "𝘜", "𝘝", "𝘞", "𝘟", "𝘠", "𝘡",
- }, },
- { "ss bi", {
- "𝙖", "𝙗", "𝙘", "𝙙", "𝙚", "𝙛", "𝙜", "𝙝", "𝙞", "𝙟", "𝙠", "𝙡", "𝙢", "𝙣", "𝙤", "𝙥", "𝙦", "𝙧", "𝙨", "𝙩", "𝙪", "𝙫", "𝙬", "𝙭", "𝙮", "𝙯",
- "𝘼", "𝘽", "𝘾", "𝘿", "𝙀", "𝙁", "𝙂", "𝙃", "𝙄", "𝙅", "𝙆", "𝙇", "𝙈", "𝙉", "𝙊", "𝙋", "𝙌", "𝙍", "𝙎", "𝙏", "𝙐", "𝙑", "𝙒", "𝙓", "𝙔", "𝙕",
- }, },
- { "tt", {
- "𝚊", "𝚋", "𝚌", "𝚍", "𝚎", "𝚏", "𝚐", "𝚑", "𝚒", "𝚓", "𝚔", "𝚕", "𝚖", "𝚗", "𝚘", "𝚙", "𝚚", "𝚛", "𝚜", "𝚝", "𝚞", "𝚟", "𝚠", "𝚡", "𝚢", "𝚣",
- "𝙰", "𝙱", "𝙲", "𝙳", "𝙴", "𝙵", "𝙶", "𝙷", "𝙸", "𝙹", "𝙺", "𝙻", "𝙼", "𝙽", "𝙾", "𝙿", "𝚀", "𝚁", "𝚂", "𝚃", "𝚄", "𝚅", "𝚆", "𝚇", "𝚈", "𝚉",
- "𝟶", "𝟷", "𝟸", "𝟹", "𝟺", "𝟻", "𝟼", "𝟽", "𝟾", "𝟿"
- }, },
- { "gr tf", {
- "α", "β", "γ", "δ", "ε", "ζ", "η", "θ", "ι", "κ", "λ", "μ", "ν", "ξ", "ο", "π", "ρ", "ς", "σ", "τ", "υ", "φ", "χ", "ψ", "ω",
- "Α", "Β", "Γ", "Δ", "Ε", "Ζ", "Η", "Θ", "Ι", "Κ", "Λ", "Μ", "Ν", "Ξ", "Ο", "Π", "Ρ", "΢", "Σ", "Τ", "Υ", "Φ", "Χ", "Ψ", "Ω",
- }, },
- { "gr bf", {
- "𝛂", "𝛃", "𝛄", "𝛅", "𝛆", "𝛇", "𝛈", "𝛉", "𝛊", "𝛋", "𝛌", "𝛍", "𝛎", "𝛏", "𝛐", "𝛑", "𝛒", "𝛓", "𝛔", "𝛕", "𝛖", "𝛗", "𝛘", "𝛙", "𝛚",
- "𝚨", "𝚩", "𝚪", "𝚫", "𝚬", "𝚭", "𝚮", "𝚯", "𝚰", "𝚱", "𝚲", "𝚳", "𝚴", "𝚵", "𝚶", "𝚷", "𝚸", "𝚹", "𝚺", "𝚻", "𝚼", "𝚽", "𝚾", "𝚿", "𝛀",
- }, },
- { "gr it", {
- "𝛼", "𝛽", "𝛾", "𝛿", "𝜀", "𝜁", "𝜂", "𝜃", "𝜄", "𝜅", "𝜆", "𝜇", "𝜈", "𝜉", "𝜊", "𝜋", "𝜌", "𝜍", "𝜎", "𝜏", "𝜐", "𝜑", "𝜒", "𝜓", "𝜔",
- "𝛢", "𝛣", "𝛤", "𝛥", "𝛦", "𝛧", "𝛨", "𝛩", "𝛪", "𝛫", "𝛬", "𝛭", "𝛮", "𝛯", "𝛰", "𝛱", "𝛲", "𝛳", "𝛴", "𝛵", "𝛶", "𝛷", "𝛸", "𝛹", "𝛺",
- }, },
- { "gr bi", {
- "𝜶", "𝜷", "𝜸", "𝜹", "𝜺", "𝜻", "𝜼", "𝜽", "𝜾", "𝜿", "𝝀", "𝝁", "𝝂", "𝝃", "𝝄", "𝝅", "𝝆", "𝝇", "𝝈", "𝝉", "𝝊", "𝝋", "𝝌", "𝝍", "𝝎",
- "𝜜", "𝜝", "𝜞", "𝜟", "𝜠", "𝜡", "𝜢", "𝜣", "𝜤", "𝜥", "𝜦", "𝜧", "𝜨", "𝜩", "𝜪", "𝜫", "𝜬", "𝜭", "𝜮", "𝜯", "𝜰", "𝜱", "𝜲", "𝜳", "𝜴",
- }, },
- { "gr ss bf", {
- "𝝰", "𝝱", "𝝲", "𝝳", "𝝴", "𝝵", "𝝶", "𝝷", "𝝸", "𝝹", "𝝺", "𝝻", "𝝼", "𝝽", "𝝾", "𝝿", "𝞀", "𝞁", "𝞂", "𝞃", "𝞄", "𝞅", "𝞆", "𝞇", "𝞈",
- "𝝖", "𝝗", "𝝘", "𝝙", "𝝚", "𝝛", "𝝜", "𝝝", "𝝞", "𝝟", "𝝠", "𝝡", "𝝢", "𝝣", "𝝤", "𝝥", "𝝦", "𝝧", "𝝨", "𝝩", "𝝪", "𝝫", "𝝬", "𝝭", "𝝮",
- }, },
- { "gr ss bi", {
- "𝞪", "𝞫", "𝞬", "𝞭", "𝞮", "𝞯", "𝞰", "𝞱", "𝞲", "𝞳", "𝞴", "𝞵", "𝞶", "𝞷", "𝞸", "𝞹", "𝞺", "𝞻", "𝞼", "𝞽", "𝞾", "𝞿", "𝟀", "𝟁", "𝟂",
- "𝞐", "𝞑", "𝞒", "𝞓", "𝞔", "𝞕", "𝞖", "𝞗", "𝞘", "𝞙", "𝞚", "𝞛", "𝞜", "𝞝", "𝞞", "𝞟", "𝞠", "𝞡", "𝞢", "𝞣", "𝞤", "𝞥", "𝞦", "𝞧", "𝞨",
- }, },
- { "op", {
- }, },
- { "sy a", {
- }, },
- { "sy b", {
- }, },
- { "sy c", {
- }, },
-}
-
-local mathlists = { }
-local mathselector = { }
-
-for i=1,#mathsets do
- local mathset = mathsets[i]
- mathselector[#mathselector+1] = mathset[1]
- mathlists[mathset[1]] = mathset[2]
-end
-
-local enabled = 0
-local usedlists = {
- { name = "text", current = "en", lists = textlists, selector = textselector },
- { name = "math", current = "tf", lists = mathlists, selector = mathselector },
-}
-
--- I haven't found out yet how to create a strip as in scite.
-
--- local function make_strip()
--- local used = usedlists[enabled]
--- local lists = used.lists
--- local alphabet = lists[used.current]
--- local selector = "(hide)(" .. concat(used.selector,")(") .. ")"
--- local alphabet = "(" .. used.current .. ":)(" .. concat(alphabet,")(") .. ")"
--- -- scite.StripShow(selector .. "\n" .. alphabet)
--- end
---
--- local function hide_strip()
--- -- scite.StripShow("")
--- end
---
--- local function process_strip(control)
--- -- local value = scite.StripValue(control)
--- -- if value == "hide" then
--- -- hide_strip()
--- -- return
--- -- elseif find(value,".+:") then
--- -- return
--- -- end
--- -- local used = usedlists[enabled]
--- -- if used.lists[value] then
--- -- used.current = value
--- -- make_strip()
--- -- else
--- -- editor:insert(editor.CurrentPos,value)
--- -- end
--- end
---
--- local function ignore_strip()
--- end
-
-function runner.unicodes(name)
--- enabled = enabled + 1
--- if usedlists[enabled] then
--- make_strip()
--- else
--- enabled = 0
--- hide_strip()
--- end
-end
-
-return runner
-
--- The ui.print function is a bit heavy as each flush will parse the whole list of buffers.
--- Also it does some tab magic that we don't need or want. There is the original ui.print for
--- that. FWIW, speed is not an issue. Some optimizations:
-
--- function _print(buffer_type,one,two,...)
--- ...
--- print_buffer:append_text(one)
--- if two then
--- print_buffer:append_text(two)
--- for i=1, select('#', ...) do
--- print_buffer:append_text((select(i,...)))
--- end
--- end
--- print_buffer:append_text('\n')
--- ...
--- end
---
--- And a better splitter:
--- ...
--- local rest
--- local function emit_output(output)
--- for line, lineend in output:gmatch('([^\r\n]+)([\r\n]?)') do
--- if rest then
--- line = rest .. line
--- rest = nil
--- end
--- if lineend and lineend ~= "" then
--- events.emit(event, line, ext_or_lexer)
--- else
--- rest = line
--- end
--- end
--- end
--- ...
--- if rest then
--- events.emit(event,rest,ext_or_lexer)
--- end
--- events.emit(event, '> exit status: '..status)
--- ...
diff --git a/context/data/textadept/context/modules/textadept-context-settings.lua b/context/data/textadept/context/modules/textadept-context-settings.lua
deleted file mode 100644
index 6a9f49d51..000000000
--- a/context/data/textadept/context/modules/textadept-context-settings.lua
+++ /dev/null
@@ -1,152 +0,0 @@
-local info = {
- version = 1.002,
- comment = "presets for textadept for context/metafun",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-local lexer = require("scite-context-lexer")
-local context = lexer.context
-
-if context then
-
- function context.synchronize()
- local buffer = buffer
- local property = lexer.property
- local property_int = lexer.property_int
-
- buffer:set_fold_margin_colour (true, property_int["color.light"])
- buffer:set_fold_margin_hi_colour (true, property_int["color.white"])
- buffer:set_sel_fore (false, property_int["color.dark"])
- buffer:set_sel_back (true, property_int["color.selection"])
-
- local MARK_BOOKMARK = textadept.bookmarks.MARK_BOOKMARK
- local MARK_WARNING = textadept.run.MARK_WARNING
- local MARK_ERROR = textadept.run.MARK_ERROR
-
- -- buffer.marker_fore[MARK_BOOKMARK] = property_int["color.white"]
- buffer.marker_back[MARK_BOOKMARK] = property_int["color.blue"]
- -- buffer.marker_fore[MARK_WARNING] = property_int["color.white"]
- buffer.marker_back[MARK_WARNING] = property_int["color.orange"]
- -- buffer.marker_fore[MARK_ERROR] = property_int["color.white"]
- buffer.marker_back[MARK_ERROR] = property_int["color.red"]
- for i = 25, 31 do
- buffer.marker_fore[i] = property_int["color.white"]
- buffer.marker_back[i] = property_int["color.grey"]
- buffer.marker_back_selected[i] = property_int["color.dark"]
- end
-
- local INDIC_BRACEMATCH = textadept.editing .INDIC_BRACEMATCH
- local INDIC_HIGHLIGHT = textadept.editing .INDIC_HIGHLIGHT
- local INDIC_PLACEHOLDER = textadept.snippets.INDIC_PLACEHOLDER
- local INDIC_FIND = ui.find.INDIC_FIND
-
- buffer.indic_fore [INDIC_FIND] = property_int["color.gray"]
- buffer.indic_alpha[INDIC_FIND] = 255
- buffer.indic_fore [INDIC_BRACEMATCH] = property_int["color.orange"]
- buffer.indic_style[INDIC_BRACEMATCH] = buffer.INDIC_BOX -- hard to see (I need to check scite)
- buffer.indic_fore [INDIC_HIGHLIGHT] = property_int["color.gray"]
- buffer.indic_alpha[INDIC_HIGHLIGHT] = 255
- buffer.indic_fore [INDIC_PLACEHOLDER] = property_int["color.gray"]
-
- -- buffer:brace_highlight_indicator(false, INDIC_BRACEMATCH)
-
- -- buffer.call_tip_fore_hlt = property_int["color.blue"]
- buffer.edge_colour = property_int["color.grey"]
-
- buffer.tab_width = 4
- buffer.use_tabs = false
- buffer.indent = 4
- buffer.tab_indents = true
- buffer.back_space_un_indents = true
- buffer.indentation_guides = not CURSES and buffer.IV_LOOKBOTH or buffer.IV_NONE
- buffer.wrap_length = 80
-
- buffer.sel_eol_filled = true
- -- buffer.sel_alpha =
- buffer.multiple_selection = true
- buffer.additional_selection_typing = true
- -- buffer.multi_paste = buffer.MULTIPASTE_EACH
- -- buffer.virtual_space_options = buffer.VS_RECTANGULARSELECTION + buffer.VS_USERACCESSIBLE
- buffer.rectangular_selection_modifier = buffer.MOD_ALT
- buffer.mouse_selection_rectangular_switch = true
-
- -- buffer.additional_sel_alpha =
- -- buffer.additional_sel_fore =
- -- buffer.additional_sel_back =
-
- -- how to turn of the annoying background behind the current line ...
-
- -- buffer.additional_caret_fore =
- -- buffer.additional_carets_blink = false
- -- buffer.additional_carets_visible = false
- buffer.caret_line_visible = false -- not CURSES and buffer ~= ui.command_entry
- buffer.caret_line_visible_always = false
- -- buffer.caret_period = 0
- -- buffer.caret_style = buffer.CARETSTYLE_BLOCK
- buffer.caret_width = 10
- buffer.caret_sticky = buffer.CARETSTICKY_ON
- buffer.caret_fore = property_int["color.black"]
- buffer.caret_line_back = property_int["color.light"]
- -- buffer.caret_line_back_alpha =
-
- buffer.view_ws = buffer.WS_INVISIBLE
- buffer.view_eol = false
-
- buffer.annotation_visible = buffer.ANNOTATION_BOXED
-
- local NUMBER_MARGIN = 0
- local MARKER_MARGIN = 1
- local FOLD_MARGIN = 2 -- there are more
-
- buffer.margin_type_n [NUMBER_MARGIN] = buffer.MARGIN_NUMBER
- buffer.margin_width_n[NUMBER_MARGIN] = (CURSES and 0 or 6) + 4 * buffer:text_width(buffer.STYLE_LINENUMBER,'9') -- magic
- buffer.margin_width_n[MARKER_MARGIN] = CURSES and 1 or 18
- buffer.margin_width_n[FOLD_MARGIN] = CURSES and 1 or 18
-
- buffer.margin_mask_n[FOLD_MARGIN] = buffer.MASK_FOLDERS -- does something weird: bullets
-
- buffer:marker_define(buffer.MARKNUM_FOLDEROPEN, buffer.MARK_BOXMINUS)
- buffer:marker_define(buffer.MARKNUM_FOLDER, buffer.MARK_BOXPLUS)
- buffer:marker_define(buffer.MARKNUM_FOLDERSUB, buffer.MARK_VLINE)
- buffer:marker_define(buffer.MARKNUM_FOLDERTAIL, buffer.MARK_LCORNER)
- buffer:marker_define(buffer.MARKNUM_FOLDEREND, buffer.MARK_BOXPLUSCONNECTED)
- buffer:marker_define(buffer.MARKNUM_FOLDEROPENMID, buffer.MARK_BOXMINUSCONNECTED)
- buffer:marker_define(buffer.MARKNUM_FOLDERMIDTAIL, buffer.MARK_TCORNER)
-
- -- buffer.fold_all = buffer.FOLDACTION_CONTRACT + buffer.FOLDACTION_EXPAND + buffer.FOLDACTION_TOGGLE
-
- -- somehow the foldeing sumbol sin th emargin cannot be clicked on ... there seems to be some
- -- interface .. if this needs to be implemented via events i'll then probably make a copy and
- -- start doing all
-
- -- buffer.margin_sensitive_n[2] = true
-
- -- buffer.property['fold'] = "1"
- -- buffer.automatic_fold = buffer.AUTOMATICFOLD_SHOW + buffer.AUTOMATICFOLD_CLICK + buffer.AUTOMATICFOLD_CHANGE
- -- buffer.fold_flags = not CURSES and buffer.FOLDFLAG_LINEAFTER_CONTRACTED or 0
- -- buffer.fold_display_text_style = buffer.FOLDDISPLAYTEXT_BOXED
-
- buffer.wrap_mode = buffer.WRAP_NONE
-
- buffer.margin_back_n[NUMBER_MARGIN] = property_int["color.linenumber"] -- doesn't work
-
- buffer.property = {
- -- ["style.linenumber"] = property["style.linenumber"], -- somehow it fails
- }
-
- buffer.property_int = {
- -- nothing
- }
-
- -- keys [OSX and 'mr' or 'cr' ] = textadept.run.run
- -- keys [OSX and 'mR' or (GUI and 'cR' or 'cmr')] = textadept.run.compile
- -- keys [OSX and 'mB' or (GUI and 'cB' or 'cmb')] = textadept.run.build
- -- keys [OSX and 'mX' or (GUI and 'cX' or 'cmx')] = textadept.run.stop
-
- end
-
- context.synchronize()
-
-end
diff --git a/context/data/textadept/context/modules/textadept-context-types.lua b/context/data/textadept/context/modules/textadept-context-types.lua
deleted file mode 100644
index 97fb2e17c..000000000
--- a/context/data/textadept/context/modules/textadept-context-types.lua
+++ /dev/null
@@ -1,175 +0,0 @@
-local info = {
- version = 1.002,
- comment = "filetypes for textadept for context/metafun",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- todo: add the same ones as we have in scite
-
-local lexer = require("scite-context-lexer")
-local context = lexer.context
-local install = context.install
-
--- autopdf takes long to stop (weird, not in scite)
-
--- WIN32 and 'start "" "%e.pdf"' or OSX and 'open "%e.pdf"' or 'xdg-open "%e.pdf"',
-
-local quitter = function(output)
- return find(output,"%? +$") and true or false, "see message above"
-end
-
-local listing = {
- command = [[mtxrun --autogenerate --script context --autopdf --extra=listing --scite --compact "%basename%"]],
- quitter = quitter,
-}
-
-install {
- lexer = "scite-context-lexer-tex",
- suffixes = {
- "tex",
- "mkii",
- "mkiv", "mkvi", "mkix", "mkxi",
- "mkic", "mkci",
-
- },
- check = {
- command = [[mtxrun --autogenerate --script check "%basename%"]],
- quitter = quitter,
- },
- process = {
- command = [[mtxrun --autogenerate --script context --autopdf "%basename%"]],
- quitter = quitter,
- },
- listing = listing,
- generate = [[mtxrun --generate]],
- fonts = [[mtxrun --script fonts --reload --force]],
- clear = [[mtxrun --script cache --erase]],
- purge = [[mtxrun --script context --purgeall]],
- preview = [[]],
- logfile = [[]],
- arrange = [[]],
- unicodes = [[]],
- setter = function(lexer)
- -- whatever
- end,
-}
-
-install {
- lexer = "scite-context-lexer-xml",
- suffixes = {
- "xml", "xsl", "xsd", "fo", "dtd", "xslt",
- "lmx", "exa", "ctx", "export",
- "rlb", "rlg", "rlv", "rng",
- "xfdf",
- "htm", "html", "xhtml",
- "svg",
- "xul"
- },
- check = [[tidy -quiet -utf8 -xml -errors "%basename%"]],
- process = {
- command = [[mtxrun --autogenerate --script context --autopdf "%basename%"]], -- --autopdf]],
- quitter = quitter,
- },
- listing = listing,
- setter = function(lexer)
- -- whatever
- end,
-}
-
-install {
- lexer = "scite-context-lexer-mps",
- suffixes = {
- "mp", "mpx"
- },
- listing = listing,
- setter = function(lexer)
- -- whatever
- end,
-}
-
-install {
- lexer = "scite-context-lexer-lua",
- suffixes = {
- "lua", "luc",
- "cld", "tuc", "luj", "lum", "tma", "lfg", "luv", "lui"
- },
- check = [[mtxrun --autogenerate --script "%basename%"]],
- process = [[mtxrun --autogenerate --script "%basename%"]],
- preview = [[mtxrun --autogenerate --script "%basename%"]],
- listing = listing,
- setter = function(lexer)
- -- whatever
- end,
-}
-
-install {
- lexer = "scite-context-lexer-txt",
- suffixes = {
- "txt"
- },
- listing = listing,
- setter = function(lexer)
- -- whatever
- end,
-}
-
-install {
- lexer = "scite-context-lexer-pdf",
- suffixes = {
- "pdf"
- },
- encoding = "7-BIT-ASCII",
- setter = function(lexer)
- -- whatever
- end,
-}
-
-install {
- lexer = "scite-context-lexer-web",
- suffixes = {
- "w",
- "ww"
- },
- listing = listing,
- setter = function(lexer)
- -- whatever
- end,
-}
-
-install {
- lexer = "scite-context-lexer-cpp",
- suffixes = {
- "h", "c",
- "hh", "cc",
- "hpp", "cpp",
- "hxx", "cxx"
- },
- listing = listing,
- setter = function(lexer)
- -- whatever
- end,
-}
-
-install {
- "scite-context-lexer-bibtex",
- suffixes = {
- "bib"
- },
- listing = listing,
- setter = function(lexer)
- -- whatever
- end,
-}
-
-install {
- "scite-context-lexer-sql",
- suffixes = {
- "sql"
- },
- listing = listing,
- setter = function(lexer)
- -- whatever
- end,
-}
diff --git a/context/data/textadept/context/textadept-context.cmd b/context/data/textadept/context/textadept-context.cmd
deleted file mode 100644
index cd20e8d3d..000000000
--- a/context/data/textadept/context/textadept-context.cmd
+++ /dev/null
@@ -1,56 +0,0 @@
-@echo off
-
-rem This script starts textadept in an adapted mode, stripped from all the stuff we don't need,
-rem geared at the file formats that context deals with. The reason for this is that first of
-rem all we come from scite, but also because the average user doesn't need that much and can
-rem get confused by all kind of options that are irrelevant for editing text files.
-
-rem This startup script assumes that the files can be found relative to this script. It's kind
-rem of tricky because textadept, while being quite configurable, is not really made for such a
-rem real bare startup situation but after some trial and error, so far it works out ok. There
-rem are still some issues due to assumptions in the original code. In the meantime processing
-rem a file from within the editing sessions works ok which is a huge improvement over earlier
-rem versions of textadept (it was actually a show stopper) so now textadept can be used as a
-rem drop in for scite. We're getting there!
-
-rem Although I like the idea of textadept, it is no longer a simple Lua binding to scintilla
-rem and the claim that it is small is no longer true. The number of Lua lines doesn't really
-rem say much if there are many third party dll dependencies (at least I see many files in the
-rem zip and most of them probably relate to parts of the graphical interface and therefore most
-rem is probably not used at all. The more dependencies there are, the less interesting it is to
-rem officially support it as one of the reference editors for context, given that tex and friends
-rem aim at long term stability. It's huge and unless I'm mistaken there is no minimal lightweight
-rem variant for building a stripped down variant (in editing with mono spaced fonts we don't need
-rem all that stuff). A small static stripped binary would be really nice to have (and I'd
-rem probably default to using textadept then). I might at some point decide to strip more and just
-rem provide what we only need (which is less than is there now). We'll see how it evolves.
-
-rem In the meantime support for scintillua has been dropped which makes scite vulnerable as there
-rem is no default scite (yet) with lpeg built in. Anyway, it means that we will not provide an
-rem installer for scite or textadept which does the reference highlighting we've been using for
-rem decades. It is up to the user: use lightweight scite or a more dependent but also more
-rem configurable texadept. It would be really nice to have multiple options for editing (read: if
-rem scite would have scintillua on board.) The same is true for notepad++. Each of them has its
-rem advantage (and each is used by context users).
-
-rem Unless the textadept api changes fundamentally (as happened a couple of times before) this
-rem should work:
-
-start textadept -u %~dp0 %*
-
-rem I still need to port some of the extra functionality that we have in scite to textadept, which
-rem will happen in due time. We use our own lexers because they are more efficient and have some
-rem extra options (they were also much faster at that time and could handle very large files; they
-rem also build on already existing code in context verbatim mode). By the way, editing char-def.lua
-rem in textadept is actually now faster than in scite (using the same lpeg lexers), which is nice.
-rem There is no language strip functionality yet as there is no strip (bottom area) as in scite.
-
-rem The macros.lua file has some hard coded assumptions wrt menu items and the event crashes with a
-rem error message that we can't get rid of. I need to figure out a way to close that buffer but
-rem somehow the first buffer is closed anyway which is kind of weird. One way out is to just
-rem comment:
-rem
-rem -- textadept.menu.menubar[_L['_Tools']][_L['Select Co_mmand']][2],
-rem
-rem Maybe I should just copy all the files and remove code we don't need but ... let's delay that
-rem as it might get fixed. I'm in no hurry. \ No newline at end of file
diff --git a/context/data/textadept/context/textadept-context.sh b/context/data/textadept/context/textadept-context.sh
deleted file mode 100644
index 5f613ccf8..000000000
--- a/context/data/textadept/context/textadept-context.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/sh
-
-# copied from setuptex
-
-if [ z"$BASH_SOURCE" != z ]; then
- textadept -u $(cd -P -- "$(dirname -- "$BASH_SOURCE")" && pwd -P) "$@" &
-elif [ z"$KSH_VERSION" != z ]; then
- textadept -u $(cd -P -- "$(dirname -- "${.sh.file}")" && pwd -P) "$@" &
-else
- textadept -u $(cd -P -- "$(dirname -- "$0")" && pwd -P) "$@" &
-fi
-
diff --git a/context/data/textadept/context/themes/scite-context-theme.lua b/context/data/textadept/context/themes/scite-context-theme.lua
deleted file mode 100644
index f746c3d09..000000000
--- a/context/data/textadept/context/themes/scite-context-theme.lua
+++ /dev/null
@@ -1,159 +0,0 @@
-local info = {
- version = 1.002,
- comment = "theme for scintilla lpeg lexer for context/metafun",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- context_path = string.split(os.resultof("mtxrun --find-file context.mkiv"))[1] or ""
-
--- What used to be proper Lua definitions are in 3.42 SciTE properties although
--- integration is still somewhat half. Also, the indexed style specification is
--- now a hash (which indeed makes more sense). However, the question is: am I
--- going to rewrite the style bit? It anyway makes more sense to keep this file
--- somewhat neutral as we no longer need to be compatible. However, we cannot be
--- sure of helpers being present yet when this file is loaded, so we are somewhat
--- crippled. On the other hand, I don't see other schemes being used with the
--- context lexers.
-
--- The next kludge is no longer needed which is good!
---
--- if GTK then -- WIN32 GTK OSX CURSES
--- font_name = '!' .. font_name
--- end
-
--- I need to play with these, some work ok:
---
--- eolfilled noteolfilled
--- characterset:u|l
--- visible notvisible
--- changeable notchangeable (this way we can protect styles, e.g. preamble?)
--- hotspot nothotspot
-
-if not lexers or not lexers.initialized then
-
- local font_name = 'Dejavu Sans Mono'
- local font_size = '14'
-
- local colors = {
- red = { 0x7F, 0x00, 0x00 },
- green = { 0x00, 0x7F, 0x00 },
- blue = { 0x00, 0x00, 0x7F },
- cyan = { 0x00, 0x7F, 0x7F },
- magenta = { 0x7F, 0x00, 0x7F },
- yellow = { 0x7F, 0x7F, 0x00 },
- orange = { 0xB0, 0x7F, 0x00 },
- --
- white = { 0xFF },
- light = { 0xCF },
- grey = { 0x80 },
- dark = { 0x4F },
- black = { 0x00 },
- --
- selection = { 0xF7 },
- logpanel = { 0xE7 },
- textpanel = { 0xCF },
- linepanel = { 0xA7 },
- tippanel = { 0x44 },
- --
- right = { 0x00, 0x00, 0xFF },
- wrong = { 0xFF, 0x00, 0x00 },
- }
-
- local styles = {
-
- ["whitespace"] = { },
- -- ["default"] = { font = font_name, size = font_size, fore = colors.black, back = colors.textpanel },
- -- ["default"] = { font = font_name, size = font_size, fore = colors.black },
- ["default"] = { font = font_name, size = font_size, fore = colors.black,
- back = textadept and colors.textpanel or nil },
- ["number"] = { fore = colors.cyan },
- ["comment"] = { fore = colors.yellow },
- ["keyword"] = { fore = colors.blue, bold = true },
- ["string"] = { fore = colors.magenta },
- -- ["preproc"] = { fore = colors.yellow, bold = true },
- ["error"] = { fore = colors.red },
- ["label"] = { fore = colors.red, bold = true },
-
- ["nothing"] = { },
- ["class"] = { fore = colors.black, bold = true },
- ["function"] = { fore = colors.black, bold = true },
- ["constant"] = { fore = colors.cyan, bold = true },
- ["operator"] = { fore = colors.blue },
- ["regex"] = { fore = colors.magenta },
- ["preprocessor"] = { fore = colors.yellow, bold = true },
- ["tag"] = { fore = colors.cyan },
- ["type"] = { fore = colors.blue },
- ["variable"] = { fore = colors.black },
- ["identifier"] = { },
-
- ["linenumber"] = { back = colors.linepanel },
- ["bracelight"] = { fore = colors.orange, bold = true },
- ["bracebad"] = { fore = colors.orange, bold = true },
- ["controlchar"] = { },
- ["indentguide"] = { fore = colors.linepanel, back = colors.white },
- ["calltip"] = { fore = colors.white, back = colors.tippanel },
-
- ["invisible"] = { back = colors.orange },
- ["quote"] = { fore = colors.blue, bold = true },
- ["special"] = { fore = colors.blue },
- ["extra"] = { fore = colors.yellow },
- ["embedded"] = { fore = colors.black, bold = true },
- ["char"] = { fore = colors.magenta },
- ["reserved"] = { fore = colors.magenta, bold = true },
- ["definition"] = { fore = colors.black, bold = true },
- ["okay"] = { fore = colors.dark },
- ["warning"] = { fore = colors.orange },
- ["standout"] = { fore = colors.orange, bold = true },
- ["command"] = { fore = colors.green, bold = true },
- ["internal"] = { fore = colors.orange, bold = true },
- ["preamble"] = { fore = colors.yellow },
- ["grouping"] = { fore = colors.red },
- ["primitive"] = { fore = colors.blue, bold = true },
- ["plain"] = { fore = colors.dark, bold = true },
- ["user"] = { fore = colors.green },
- ["data"] = { fore = colors.cyan, bold = true },
-
- -- equal to default:
-
- ["text"] = { font = font_name, size = font_size, fore = colors.black, back = colors.textpanel },
- ["text"] = { font = font_name, size = font_size, fore = colors.black },
-
- }
-
- local properties = {
- ["fold.by.parsing"] = 1,
- ["fold.by.indentation"] = 0,
- ["fold.by.line"] = 0,
- ["fold.line.comments"] = 0,
- --
- ["lexer.context.log"] = 1, -- log errors and warnings
- ["lexer.context.trace"] = 0, -- show loading, initializations etc
- ["lexer.context.detail"] = 0, -- show more detail when tracing
- ["lexer.context.show"] = 0, -- show result of lexing
- ["lexer.context.collapse"] = 0, -- make lexing results somewhat more efficient
- ["lexer.context.inspect"] = 0, -- show some info about lexer (styles and so)
- --
- -- ["lexer.context.log"] = 1, -- log errors and warnings
- -- ["lexer.context.trace"] = 1, -- show loading, initializations etc
- }
-
- ----- lexers = require("lexer")
- local lexer = require("scite-context-lexer")
- local context = lexer.context
-
- if context then
- context.inform("loading context (style) properties")
- if context.registerstyles then
- context.registerstyles(styles)
- end
- if context.registercolors then
- context.registercolors(colors)
- end
- if context.registerproperties then
- context.registerproperties(properties)
- end
- end
-
-end